[ 649.942729] env[68282]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68282) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 649.943133] env[68282]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68282) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 649.943196] env[68282]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68282) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 649.943512] env[68282]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 650.032040] env[68282]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68282) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 650.042610] env[68282]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=68282) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 650.181031] env[68282]: INFO nova.virt.driver [None req-a582f3c2-75f9-4998-b7cb-77981939af86 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 650.259532] env[68282]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 650.259668] env[68282]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 650.259778] env[68282]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68282) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 653.436785] env[68282]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-7bbfba13-b504-45dc-b714-9e91c34fd4f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.452897] env[68282]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68282) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 653.453100] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-1c884d68-100c-429c-b6eb-0c8caf128482 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.485382] env[68282]: INFO oslo_vmware.api [-] Successfully established new session; session ID is b39b0.
[ 653.485540] env[68282]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.226s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 653.486103] env[68282]: INFO nova.virt.vmwareapi.driver [None req-a582f3c2-75f9-4998-b7cb-77981939af86 None None] VMware vCenter version: 7.0.3
[ 653.489558] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760aa752-ecc4-4ce0-b3a8-3e467cd21aac {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.506634] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcedaab-c9bc-49a4-9e80-b215b5cb4feb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.512500] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a9003a-a827-44ef-b05f-c7c897aa737e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.519143] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e73760f-dc81-484e-8f2a-07d6e518e7e2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.532289] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0876d65d-0d2a-402b-9f3a-fc93a21de9f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.538112] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8246a6cd-cd4e-4292-bf11-755fbe79a5c9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.568298] env[68282]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-7dd70e07-326a-4e90-9882-9f735fed9ba5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 653.573256] env[68282]: DEBUG nova.virt.vmwareapi.driver [None req-a582f3c2-75f9-4998-b7cb-77981939af86 None None] Extension org.openstack.compute already exists. {{(pid=68282) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 653.575853] env[68282]: INFO nova.compute.provider_config [None req-a582f3c2-75f9-4998-b7cb-77981939af86 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 653.598495] env[68282]: DEBUG nova.context [None req-a582f3c2-75f9-4998-b7cb-77981939af86 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),60b0edd8-8932-4926-a0b9-6ac5ac112f5a(cell1) {{(pid=68282) load_cells /opt/stack/nova/nova/context.py:464}}
[ 653.600600] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 653.600818] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 653.601483] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 653.601908] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Acquiring lock "60b0edd8-8932-4926-a0b9-6ac5ac112f5a" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 653.602116] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Lock "60b0edd8-8932-4926-a0b9-6ac5ac112f5a" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 653.603115] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Lock "60b0edd8-8932-4926-a0b9-6ac5ac112f5a" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 653.623718] env[68282]: INFO dbcounter [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Registered counter for database nova_cell0
[ 653.631978] env[68282]: INFO dbcounter [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Registered counter for database nova_cell1
[ 653.634952] env[68282]: DEBUG oslo_db.sqlalchemy.engines [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68282) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 653.635547] env[68282]: DEBUG oslo_db.sqlalchemy.engines [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68282) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 653.639778] env[68282]: DEBUG dbcounter [-] [68282] Writer thread running {{(pid=68282) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 653.640670] env[68282]: DEBUG dbcounter [-] [68282] Writer thread running {{(pid=68282) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 653.643283] env[68282]: ERROR nova.db.main.api [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 653.643283] env[68282]: result = function(*args, **kwargs)
[ 653.643283] env[68282]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 653.643283] env[68282]: return func(*args, **kwargs)
[ 653.643283] env[68282]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 653.643283] env[68282]: result = fn(*args, **kwargs)
[ 653.643283] env[68282]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 653.643283] env[68282]: return f(*args, **kwargs)
[ 653.643283] env[68282]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 653.643283] env[68282]: return db.service_get_minimum_version(context, binaries)
[ 653.643283] env[68282]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 653.643283] env[68282]: _check_db_access()
[ 653.643283] env[68282]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 653.643283] env[68282]: stacktrace = ''.join(traceback.format_stack())
[ 653.643283] env[68282]:
[ 653.644047] env[68282]: ERROR nova.db.main.api [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 653.644047] env[68282]: result = function(*args, **kwargs)
[ 653.644047] env[68282]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 653.644047] env[68282]: return func(*args, **kwargs)
[ 653.644047] env[68282]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 653.644047] env[68282]: result = fn(*args, **kwargs)
[ 653.644047] env[68282]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 653.644047] env[68282]: return f(*args, **kwargs)
[ 653.644047] env[68282]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 653.644047] env[68282]: return db.service_get_minimum_version(context, binaries)
[ 653.644047] env[68282]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 653.644047] env[68282]: _check_db_access()
[ 653.644047] env[68282]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 653.644047] env[68282]: stacktrace = ''.join(traceback.format_stack())
[ 653.644047] env[68282]:
[ 653.644450] env[68282]: WARNING nova.objects.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Failed to get minimum service version for cell 60b0edd8-8932-4926-a0b9-6ac5ac112f5a
[ 653.644554] env[68282]: WARNING nova.objects.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 653.644975] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Acquiring lock "singleton_lock" {{(pid=68282) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.645153] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Acquired lock "singleton_lock" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.645412] env[68282]: DEBUG oslo_concurrency.lockutils [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Releasing lock "singleton_lock" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.645728] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Full set of CONF: {{(pid=68282) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 653.645875] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ******************************************************************************** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 653.646015] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] Configuration options gathered from: {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 653.646165] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 653.646354] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 653.646483] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ================================================================================ {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 653.646694] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] allow_resize_to_same_host = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.646866] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] arq_binding_timeout = 300 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.647008] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] backdoor_port = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.647147] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] backdoor_socket = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.647317] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] block_device_allocate_retries = 60 {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.647478] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] block_device_allocate_retries_interval = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.647650] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cert = self.pem {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.647820] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.647990] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute_monitors = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.648175] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] config_dir = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.648403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] config_drive_format = iso9660 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.648541] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.648717] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] config_source = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.648933] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] console_host = devstack {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.649112] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] control_exchange = nova {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.649286] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cpu_allocation_ratio = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.649452] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] daemon = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.649621] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] debug = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.649780] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] default_access_ip_network_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.650016] 
env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] default_availability_zone = nova {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.650153] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] default_ephemeral_format = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.650320] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] default_green_pool_size = 1000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.650554] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.650721] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] default_schedule_zone = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.650884] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] disk_allocation_ratio = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.651057] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] enable_new_services = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.651238] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] enabled_apis = ['osapi_compute'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.651403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] enabled_ssl_apis = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.651563] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] flat_injected = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.651722] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] force_config_drive = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.651882] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] force_raw_images = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.652069] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 
None None] graceful_shutdown_timeout = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.652238] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] heal_instance_info_cache_interval = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.652455] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] host = cpu-1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.652635] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.652802] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.652967] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.653204] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.653368] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] instance_build_timeout = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.653531] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] instance_delete_interval = 300 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.653700] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] instance_format = [instance: %(uuid)s] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.653864] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] instance_name_template = instance-%08x {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.654032] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] instance_usage_audit = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.654211] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] instance_usage_audit_period = month {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.654377] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.654542] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.654707] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] internal_service_availability_zone = internal {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.654862] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] key = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.655030] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] live_migration_retry_count = 30 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.655198] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] log_config_append = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.655364] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.655523] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] log_dir = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.655680] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] log_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.655808] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] log_options = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.655969] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] log_rotate_interval = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.656152] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] log_rotate_interval_type = days {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.656322] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] log_rotation_type = none {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.656455] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.656584] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.656763] env[68282]: DEBUG 
oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.656921] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.657065] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.657231] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] long_rpc_timeout = 1800 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.657650] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] max_concurrent_builds = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.657650] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] max_concurrent_live_migrations = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.657753] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] max_concurrent_snapshots = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.657848] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] max_local_block_devices = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.658014] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] max_logfile_count = 30 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.658179] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] max_logfile_size_mb = 200 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.658339] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] maximum_instance_delete_attempts = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.658506] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] metadata_listen = 0.0.0.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.658670] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] metadata_listen_port = 8775 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.658835] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] metadata_workers = 2 {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.659028] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] migrate_max_retries = -1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.659215] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] mkisofs_cmd = genisoimage {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.659420] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.659555] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] my_ip = 10.180.1.21 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.659720] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] network_allocate_retries = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.659912] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.660149] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.660333] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] osapi_compute_listen_port = 8774 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.660506] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] osapi_compute_unique_server_name_scope = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.660679] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] osapi_compute_workers = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.660842] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] password_length = 12 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.661015] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] periodic_enable = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.661192] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] periodic_fuzzy_delay = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.661363] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] pointer_model = usbtablet {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.661532] env[68282]: 
DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] preallocate_images = none {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.661694] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] publish_errors = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.661827] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] pybasedir = /opt/stack/nova {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.661988] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ram_allocation_ratio = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.662169] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] rate_limit_burst = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.662341] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] rate_limit_except_level = CRITICAL {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.662503] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] rate_limit_interval = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.662668] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] reboot_timeout = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.662830] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] reclaim_instance_interval = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.662993] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] record = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.663178] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] reimage_timeout_per_gb = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.663346] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] report_interval = 120 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.663509] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] rescue_timeout = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.663669] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] reserved_host_cpus = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.663830] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] reserved_host_disk_mb = 0 {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.663989] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] reserved_host_memory_mb = 512 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.664167] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] reserved_huge_pages = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.664328] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] resize_confirm_window = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.664489] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] resize_fs_using_block_device = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.664649] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] resume_guests_state_on_host_boot = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.664817] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.664980] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] rpc_response_timeout = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.665157] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] run_external_periodic_tasks = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.665326] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] running_deleted_instance_action = reap {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.665487] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.665645] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] running_deleted_instance_timeout = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.665806] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler_instance_sync_interval = 120 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.665974] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_down_time = 720 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.666159] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] servicegroup_driver = db {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.666323] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] shelved_offload_time = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.666485] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] shelved_poll_interval = 3600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.666654] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] shutdown_timeout = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.666816] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] source_is_ipv6 = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.666976] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ssl_only = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.667236] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.667407] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] sync_power_state_interval = 600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.667569] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] sync_power_state_pool_size = 1000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.667737] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] syslog_log_facility = LOG_USER {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.667893] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] tempdir = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.668065] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] timeout_nbd = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.668238] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] transport_url = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.668400] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] update_resources_interval = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.668560] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] use_cow_images = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.668718] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 
None None] use_eventlog = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.668882] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] use_journal = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.669075] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] use_json = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.669247] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] use_rootwrap_daemon = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.669409] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] use_stderr = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.669567] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] use_syslog = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.669725] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vcpu_pin_set = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.669907] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plugging_is_fatal = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.670125] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plugging_timeout = 300 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.670303] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] virt_mkfs = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.670466] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] volume_usage_poll_interval = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.670628] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] watch_log_file = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.670799] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] web = /usr/share/spice-html5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 653.670982] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_concurrency.disable_process_locking = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.671289] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.671469] 
env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.671638] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.671810] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.671980] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.672192] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.672388] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.auth_strategy = keystone {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.672557] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.compute_link_prefix = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.672739] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.672921] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.dhcp_domain = novalocal {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.673110] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.enable_instance_password = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.673283] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.glance_link_prefix = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.673456] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.673632] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.673797] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] 
api.instance_list_per_project_cells = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.673962] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.list_records_by_skipping_down_cells = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.674142] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.local_metadata_per_cell = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.674313] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.max_limit = 1000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.674482] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.metadata_cache_expiration = 15 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.674660] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.neutron_default_tenant_id = default {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.674829] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.use_neutron_default_nets = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.674997] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.675179] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.675346] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.675519] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.675689] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.vendordata_dynamic_targets = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.675858] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.vendordata_jsonfile_path = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.676053] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.676254] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 
None None] cache.backend = dogpile.cache.memcached {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.676423] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.backend_argument = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.676595] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.config_prefix = cache.oslo {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.676765] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.dead_timeout = 60.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.676931] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.debug_cache_backend = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.677112] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.enable_retry_client = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.677276] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.enable_socket_keepalive = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.677445] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.enabled = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.677610] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.enforce_fips_mode = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.677774] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.expiration_time = 600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.677938] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.hashclient_retry_attempts = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.678122] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.678291] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_dead_retry = 300 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.678451] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_password = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.678617] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68282) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.678781] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.678975] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_pool_maxsize = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.679172] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.679342] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_sasl_enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.679522] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.679692] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.679855] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.memcache_username = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.680073] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.proxies = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.680255] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.redis_password = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.680431] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.680611] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.680782] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.redis_server = localhost:6379 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.680951] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.redis_socket_timeout = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.681130] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.redis_username = None {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.681297] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.retry_attempts = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.681463] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.retry_delay = 0.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.681635] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.socket_keepalive_count = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.681798] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.socket_keepalive_idle = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.681961] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.socket_keepalive_interval = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.682141] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.tls_allowed_ciphers = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.682304] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.tls_cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.682465] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.tls_certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.682627] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.tls_enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.682786] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cache.tls_keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.682960] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.683154] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.auth_type = password {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.683318] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.683494] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.683658] env[68282]: DEBUG oslo_service.service 
[None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.683826] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.683990] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.cross_az_attach = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.684196] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.debug = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.684376] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.endpoint_template = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.684542] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.http_retries = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.684705] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.684865] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.685050] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.os_region_name = RegionOne {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.685222] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.685383] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cinder.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.685555] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.685717] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.cpu_dedicated_set = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.685878] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.cpu_shared_set = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.686063] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.image_type_exclude_list = [] {{(pid=68282) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.686234] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.686399] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.686567] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.686735] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.686937] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.687088] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.resource_provider_association_refresh = 300 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.687258] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.shutdown_retry_interval = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.687441] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.687622] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] conductor.workers = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.687798] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] console.allowed_origins = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.687962] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] console.ssl_ciphers = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.688152] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] console.ssl_minimum_version = default {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.688323] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] consoleauth.enforce_session_timeout = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.688494] env[68282]: DEBUG oslo_service.service [None 
req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] consoleauth.token_ttl = 600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.688662] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.688821] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.689032] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.689196] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.connect_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.689360] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.connect_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.689521] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.endpoint_override = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.689687] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.689849] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.690070] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.max_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.690256] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.min_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.690419] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.region_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.690578] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.retriable_status_codes = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.690738] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.service_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.690909] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.service_type = accelerator {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.691087] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.691250] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.status_code_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.691411] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.status_code_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.691570] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.691752] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.691918] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] cyborg.version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.692114] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.backend = sqlalchemy {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.692292] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.connection = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.692461] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.connection_debug = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.692632] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.connection_parameters = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.692798] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.connection_recycle_time = 3600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.692963] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.connection_trace = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.693148] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.db_inc_retry_interval = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.693348] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.db_max_retries = 20 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 653.693521] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.db_max_retry_interval = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.693687] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.db_retry_interval = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.693853] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.max_overflow = 50 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.694029] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.max_pool_size = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.694205] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.max_retries = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.694383] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.694542] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.mysql_wsrep_sync_wait = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.694700] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.pool_timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.694862] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.retry_interval = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.695032] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.slave_connection = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.695206] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.sqlite_synchronous = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.695372] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] database.use_db_reconnect = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.695552] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.backend = sqlalchemy {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.695724] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.connection = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.695894] env[68282]: DEBUG oslo_service.service [None 
req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.connection_debug = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.696078] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.connection_parameters = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.connection_recycle_time = 3600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.connection_trace = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.db_inc_retry_interval = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.db_max_retries = 20 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.db_max_retry_interval = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.db_retry_interval = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699647] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.max_overflow = 50 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699647] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.max_pool_size = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699647] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.max_retries = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699647] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699647] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699647] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.pool_timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699807] env[68282]: DEBUG oslo_service.service [None 
req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.retry_interval = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699807] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.slave_connection = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699807] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] api_database.sqlite_synchronous = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699807] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] devices.enabled_mdev_types = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699807] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699807] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699989] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ephemeral_storage_encryption.enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699989] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699989] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.api_servers = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699989] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.699989] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.700147] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.700302] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.connect_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.700431] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.connect_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.700596] env[68282]: DEBUG oslo_service.service [None 
req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.debug = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.700764] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.default_trusted_certificate_ids = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.700928] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.enable_certificate_validation = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.701107] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.enable_rbd_download = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.701273] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.endpoint_override = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.701439] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.701600] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.701760] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.max_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.701963] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.min_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.702090] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.num_retries = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.702268] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.rbd_ceph_conf = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.702431] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.rbd_connect_timeout = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.702600] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.rbd_pool = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.702768] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.rbd_user = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.702928] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.region_name = None {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.703106] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.retriable_status_codes = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.703265] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.service_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.703438] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.service_type = image {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.703592] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.703753] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.status_code_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.703913] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.status_code_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.704085] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.704267] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.704434] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.verify_glance_signatures = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.704594] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] glance.version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.704761] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] guestfs.debug = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.704926] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] mks.enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.705302] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.705494] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] image_cache.manager_interval = 2400 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
653.705662] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] image_cache.precache_concurrency = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.705834] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] image_cache.remove_unused_base_images = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.706021] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.706194] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.706374] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] image_cache.subdirectory_name = _base {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.706552] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.api_max_retries = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.706720] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.api_retry_interval = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.706884] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.707064] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.auth_type = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.707231] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.707392] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.707557] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.707721] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.conductor_group = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.707880] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.connect_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.708051] env[68282]: DEBUG 
oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.connect_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.708225] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.endpoint_override = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.708426] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.708594] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.708758] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.max_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.708941] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.min_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.709139] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.peer_list = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.709306] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.region_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.709482] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.retriable_status_codes = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.709671] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.serial_console_state_timeout = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.709837] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.service_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.710026] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.service_type = baremetal {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.710192] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.shard = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.710360] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.710521] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.status_code_retries = None {{(pid=68282) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.710683] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.status_code_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.710842] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.711035] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.711208] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ironic.version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.711394] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.711569] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] key_manager.fixed_key = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.711750] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.711913] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.barbican_api_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.712089] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.barbican_endpoint = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.712262] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.barbican_endpoint_type = public {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.712421] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.barbican_region_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.712578] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.712734] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.712897] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.collect_timing = False {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.713073] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.713239] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.713403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.number_of_retries = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.713565] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.retry_delay = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.713727] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.send_service_user_token = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.713888] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.714059] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.714224] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.verify_ssl = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.714382] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican.verify_ssl_path = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.714549] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.714716] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.auth_type = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.714877] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.715050] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.715220] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.collect_timing = False {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.715383] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.715541] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.715703] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.715860] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] barbican_service_user.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.716035] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.approle_role_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.716205] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.approle_secret_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.716363] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.716521] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.716683] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.716844] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.717035] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.717219] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.kv_mountpoint = secret {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.717380] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.kv_path = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.717543] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.kv_version = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.717701] env[68282]: DEBUG 
oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.namespace = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.717858] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.root_token_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.718030] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.718200] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.ssl_ca_crt_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.718359] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.718520] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.use_ssl = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.718695] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.718865] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.719064] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.auth_type = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.719234] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.719393] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.719556] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.719716] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.connect_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.719878] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.connect_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.720056] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.endpoint_override = None {{(pid=68282) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.720230] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.720391] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.720550] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.max_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.720707] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.min_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.720866] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.region_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.721032] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.retriable_status_codes = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.721197] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.service_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.721371] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.service_type = identity {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.721551] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.721725] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.status_code_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.721887] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.status_code_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.722061] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.722245] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.722406] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] keystone.version = None {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.722603] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.connection_uri = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.722766] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.cpu_mode = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.722930] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.723117] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.cpu_models = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.723293] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.cpu_power_governor_high = performance {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.723462] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.723626] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.cpu_power_management = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.723796] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.723960] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.device_detach_attempts = 8 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.724140] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.device_detach_timeout = 20 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.724305] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.disk_cachemodes = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.724463] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.disk_prefix = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.724629] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.enabled_perf_events = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.724795] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.file_backed_memory = 0 {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.724960] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.gid_maps = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.725136] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.hw_disk_discard = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.725299] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.hw_machine_type = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.725479] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.images_rbd_ceph_conf = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.725652] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.725815] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.725985] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.images_rbd_glance_store_name = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.726173] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.images_rbd_pool = rbd {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.726342] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.images_type = default {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.726501] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.images_volume_group = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.726663] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.inject_key = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.726825] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.inject_partition = -2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.727015] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.inject_password = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.727198] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.iscsi_iface = None {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.727364] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.iser_use_multipath = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.727528] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.727691] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.727853] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_downtime = 500 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.728026] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.728194] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.728357] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_inbound_addr = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.728518] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.728677] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.728840] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_scheme = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.729065] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_timeout_action = abort {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.729332] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_tunnelled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.729528] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.live_migration_uri = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.729700] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.729868] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.max_queues = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.730049] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.730292] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.730460] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.nfs_mount_options = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.730746] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.730924] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.731110] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.731276] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.731442] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.731607] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.num_pcie_ports = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.731776] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.731941] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.pmem_namespaces = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.732118] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.quobyte_client_cfg = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.732475] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.732724] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.732913] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.733102] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.733274] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rbd_secret_uuid = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.733440] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rbd_user = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.733607] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.733786] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.733951] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rescue_image_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.734131] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rescue_kernel_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.734294] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rescue_ramdisk_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.734466] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.734629] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.rx_queue_size = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.734800] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.smbfs_mount_options = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.735090] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.735273] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.snapshot_compression = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.735438] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.snapshot_image_format = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.735656] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.735825] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.sparse_logical_volumes = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.735991] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.swtpm_enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.736181] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.swtpm_group = tss {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.736355] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.swtpm_user = tss {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.736528] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.sysinfo_serial = unique {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.736689] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.tb_cache_size = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.736852] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.tx_queue_size = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.737055] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.uid_maps = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.737234] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.use_virtio_for_bridges = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.737410] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.virt_type = kvm {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.737585] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.volume_clear = zero 
{{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.737752] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.volume_clear_size = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.737920] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.volume_use_multipath = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.738097] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.vzstorage_cache_path = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.738273] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.738441] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.738605] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.738775] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.739094] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.739285] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.vzstorage_mount_user = stack {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.739458] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.739637] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.739815] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.auth_type = password {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.739985] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.740162] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.certfile = None 
{{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.740329] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.740493] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.connect_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.740654] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.connect_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.740829] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.default_floating_pool = public {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.740992] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.endpoint_override = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.741176] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.extension_sync_interval = 600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.741367] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.http_retries = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.741536] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.741698] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.741860] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.max_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.742052] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.742222] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.min_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.742389] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.ovs_bridge = br-int {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.742556] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.physnets = [] {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.742727] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.region_name = RegionOne {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.742888] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.retriable_status_codes = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.743072] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.service_metadata_proxy = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.743237] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.service_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.743407] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.service_type = network {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.743571] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.743731] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.status_code_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.743890] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.status_code_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.744068] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.744255] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.744433] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] neutron.version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.744607] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] notifications.bdms_in_notifications = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.744787] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] notifications.default_level = INFO {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.744964] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] notifications.notification_format = unversioned {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.745151] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] notifications.notify_on_state_change = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.745332] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.745509] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] pci.alias = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.745682] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] pci.device_spec = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.745848] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] pci.report_in_placement = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.746031] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.746212] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.auth_type = password {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.746380] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.746542] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.746702] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.746866] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.747036] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.connect_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.747206] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.connect_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.747369] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.default_domain_id = None {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.747530] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.default_domain_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.747692] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.domain_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.747852] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.domain_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.748022] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.endpoint_override = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.748193] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.748357] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.748519] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.max_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.748679] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.min_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.748851] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.password = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.749051] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.project_domain_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.749235] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.project_domain_name = Default {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.749407] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.project_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.749583] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.project_name = service {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.749756] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.region_name = RegionOne {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.749941] 
env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.retriable_status_codes = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.750133] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.service_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.750311] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.service_type = placement {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.750479] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.750644] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.status_code_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.750811] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.status_code_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.750975] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.system_scope = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.751153] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.751348] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.trust_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.751520] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.user_domain_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.751693] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.user_domain_name = Default {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.751854] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.user_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.752039] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.username = placement {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.752229] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.752395] env[68282]: DEBUG oslo_service.service [None 
req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] placement.version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.752576] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.cores = 20 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.752742] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.count_usage_from_placement = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.752917] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.753116] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.injected_file_content_bytes = 10240 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.753292] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.injected_file_path_length = 255 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.753458] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.injected_files = 5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.753630] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.instances = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.753800] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.key_pairs = 100 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.753969] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.metadata_items = 128 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.754155] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.ram = 51200 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.754323] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.recheck_quota = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.754492] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.server_group_members = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.754661] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] quota.server_groups = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.754832] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.754999] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.755178] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.image_metadata_prefilter = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.755342] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.755506] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.max_attempts = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.755670] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.max_placement_results = 1000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.755837] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.756010] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.756185] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.756362] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] scheduler.workers = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.756543] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.756716] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.756894] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.757083] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.757253] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.757419] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.757582] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.757771] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.757939] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.host_subset_size = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.758120] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.758285] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.758451] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.758615] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.isolated_hosts = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.758779] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.isolated_images = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.758969] env[68282]: DEBUG oslo_service.service [None 
req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.759159] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.759335] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.759501] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.pci_in_placement = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.759666] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.759828] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.760023] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.760190] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.760358] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.760530] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.760697] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.track_instance_changes = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.760877] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.761059] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] metrics.required = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.761250] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] metrics.weight_multiplier = 1.0 
{{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.761434] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.761605] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] metrics.weight_setting = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.761922] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.762119] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] serial_console.enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.762306] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] serial_console.port_range = 10000:20000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.762487] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.762654] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.762828] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] serial_console.serialproxy_port = 6083 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.763007] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.763197] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.auth_type = password {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.763363] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.763524] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.763687] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.763849] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.insecure = False {{(pid=68282) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.764014] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.764197] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.send_service_user_token = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.764365] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.764524] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] service_user.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.764694] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.agent_enabled = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.764868] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.765197] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.765399] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.765574] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.html5proxy_port = 6082 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.765741] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.image_compression = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.765907] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.jpeg_compression = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.766083] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.playback_compression = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.766262] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.server_listen = 127.0.0.1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.766436] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.766600] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.streaming_mode = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.766765] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] spice.zlib_compression = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.766934] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] upgrade_levels.baseapi = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.767123] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] upgrade_levels.compute = auto {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.767293] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] upgrade_levels.conductor = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.767455] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] upgrade_levels.scheduler = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.767623] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.767790] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.767953] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.768130] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.768298] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.768459] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.768619] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.768780] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.768966] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vendordata_dynamic_auth.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.769171] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.api_retry_count = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.769340] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.ca_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.769598] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.769810] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.cluster_name = testcl1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.769986] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.connection_pool_size = 10 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.770166] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.console_delay_seconds = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.770345] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.datastore_regex = ^datastore.* {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.770551] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.770728] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.host_password = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.770901] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.host_port = 443 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.771085] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.host_username = administrator@vsphere.local {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.771286] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.insecure = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.771468] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.integration_bridge = None {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.771634] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.maximum_objects = 100 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.771794] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.pbm_default_policy = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.771959] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.pbm_enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.772136] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.pbm_wsdl_location = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.772306] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.772465] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.serial_port_proxy_uri = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.772622] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.serial_port_service_uri = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.772789] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.task_poll_interval = 0.5 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.772958] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.use_linked_clone = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.773141] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.vnc_keymap = en-us {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.773307] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.vnc_port = 5900 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.773470] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vmware.vnc_port_total = 10000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.773652] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.auth_schemes = ['none'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.773823] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.774132] env[68282]: 
DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.774324] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.774495] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.novncproxy_port = 6080 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.774672] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.server_listen = 127.0.0.1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.774844] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.775010] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.vencrypt_ca_certs = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.775179] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.vencrypt_client_cert = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.775338] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vnc.vencrypt_client_key = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.775516] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.775682] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.disable_deep_image_inspection = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.775846] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.776017] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.776184] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.776350] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.disable_rootwrap = False {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.776512] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.enable_numa_live_migration = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.776677] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.776842] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.777009] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.777180] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.libvirt_disable_apic = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.777340] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.777500] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.777660] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.777823] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.777984] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.778159] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.778321] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.778480] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
653.778636] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.778797] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.779020] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.779205] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.client_socket_timeout = 900 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.779377] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.default_pool_size = 1000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.779545] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.keep_alive = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.779710] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.max_header_line = 16384 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.779878] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.780054] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.ssl_ca_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.780225] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.ssl_cert_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.780387] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.ssl_key_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.780552] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.tcp_keepidle = 600 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.780729] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.780897] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] zvm.ca_file = None {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.781074] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] zvm.cloud_connector_url = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.781385] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.781566] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] zvm.reachable_timeout = 300 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.781749] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.enforce_new_defaults = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.781922] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.enforce_scope = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.782114] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.policy_default_rule = default {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.782299] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.782478] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.policy_file = policy.yaml {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.782650] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.782835] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.782975] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.783153] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.783321] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.783493] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.783667] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.783842] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.connection_string = messaging:// {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.784019] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.enabled = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.784194] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.es_doc_type = notification {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.784363] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.es_scroll_size = 10000 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.784534] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.es_scroll_time = 2m {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.784700] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.filter_error_trace = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.784868] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.hmac_keys = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.785047] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.sentinel_service_name = mymaster {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.785223] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.socket_timeout = 0.1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.785388] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.trace_requests = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.785549] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler.trace_sqlalchemy = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.785728] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler_jaeger.process_tags = {} {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.785892] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.786070] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] profiler_otlp.service_name_prefix = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.786240] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] remote_debug.host = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.786403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] remote_debug.port = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.786582] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.786746] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.786911] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.787085] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.787250] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.787410] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.787569] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.787729] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.787891] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.788077] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.788241] env[68282]: 
DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.788412] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.788581] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.788752] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.788946] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.789141] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.789309] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.789489] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.789657] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.789822] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.790034] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.790199] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.790366] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.790537] env[68282]: DEBUG oslo_service.service [None 
req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.790702] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.790867] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.791042] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.791221] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.791414] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.791589] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.ssl = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.791769] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.791959] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.792160] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.792341] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.792516] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.792681] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.792873] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.793056] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_notifications.retry = -1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.793247] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.793427] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.793603] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.auth_section = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.793770] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.auth_type = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.793935] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.cafile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.794108] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.certfile = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.794273] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.collect_timing = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.794432] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.connect_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.794591] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.connect_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.794753] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.endpoint_id = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.794914] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.endpoint_override = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.795091] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.insecure = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.795255] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.keyfile = None {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.795413] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.max_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.795572] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.min_version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.795731] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.region_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.795891] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.retriable_status_codes = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.796064] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.service_name = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.796228] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.service_type = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.796389] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.split_loggers = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.796550] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.status_code_retries = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.796709] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.status_code_retry_delay = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.796872] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.timeout = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.797039] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.valid_interfaces = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.797206] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_limit.version = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.797375] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_reports.file_event_handler = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.797542] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68282) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.797705] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] oslo_reports.log_dir = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.797877] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.798045] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.798214] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.798383] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.798546] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.798708] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.798895] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.799085] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_ovs_privileged.group = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.799257] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.799426] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.799591] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.799751] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] vif_plug_ovs_privileged.user = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.799938] env[68282]: DEBUG oslo_service.service 
[None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.800148] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.800331] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.800506] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.800678] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.800845] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.801021] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.801198] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.801430] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.801613] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_ovs.isolate_vif = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.801793] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.801962] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.802157] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.802332] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
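(Illustrative aside, not part of the log.) The long run of "group.option = value" DEBUG lines above is oslo.config's startup dump produced by log_opt_values(). As a minimal, hypothetical sketch — the real os_vif_ovs options are defined in os-vif, and the defaults below are simply copied from the dump — registering a small option group and calling log_opt_values() yields the same kind of lines:

```python
# Sketch only: mimics the "os_vif_ovs.* = ..." lines in the dump above.
# The authoritative option definitions live in the os-vif package.
import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)
CONF = cfg.CONF

ovs_group = cfg.OptGroup('os_vif_ovs')
ovs_opts = [
    cfg.IntOpt('network_device_mtu', default=1500),
    cfg.IntOpt('ovs_vsctl_timeout', default=120),
    cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
    cfg.StrOpt('ovsdb_interface', default='native'),
    cfg.BoolOpt('per_port_bridge', default=False),
]

CONF.register_group(ovs_group)
CONF.register_opts(ovs_opts, group=ovs_group)

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    CONF([], project='osvif-example')          # parse with no CLI args
    # Emits one DEBUG line per option, e.g.
    # "os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640"
    CONF.log_opt_values(LOG, logging.DEBUG)
```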
[ 653.802498] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_vif_ovs.per_port_bridge = False {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.802666] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_brick.lock_path = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.802833] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.802997] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.803188] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] privsep_osbrick.capabilities = [21] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.803351] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] privsep_osbrick.group = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.803510] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] privsep_osbrick.helper_command = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.803680] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.803847] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.804043] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] privsep_osbrick.user = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.804240] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.804403] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] nova_sys_admin.group = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.804562] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] nova_sys_admin.helper_command = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.804730] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
653.804895] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.805069] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] nova_sys_admin.user = None {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 653.805203] env[68282]: DEBUG oslo_service.service [None req-22066f5c-b5f6-4ab3-b603-5c5138421a70 None None] ******************************************************************************** {{(pid=68282) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 653.805632] env[68282]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 653.819769] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Getting list of instances from cluster (obj){ [ 653.819769] env[68282]: value = "domain-c8" [ 653.819769] env[68282]: _type = "ClusterComputeResource" [ 653.819769] env[68282]: } {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 653.821058] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ce4a64-1440-4c81-916a-8575e0dd7fdc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.830168] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Got total of 0 instances {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 653.830714] env[68282]: WARNING nova.virt.vmwareapi.driver [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 653.831194] env[68282]: INFO nova.virt.node [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Generated node identity 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e [ 653.831445] env[68282]: INFO nova.virt.node [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Wrote node identity 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e to /opt/stack/data/n-cpu-1/compute_id [ 653.847325] env[68282]: WARNING nova.compute.manager [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Compute nodes ['1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 653.894803] env[68282]: INFO nova.compute.manager [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 653.931193] env[68282]: WARNING nova.compute.manager [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
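(Illustrative aside, not part of the log.) The two INFO lines above about "Generated node identity ..." and "Wrote node identity ... to /opt/stack/data/n-cpu-1/compute_id" correspond to a read-or-create pattern: on first start there is no compute_id file, so a new UUID is generated and persisted. A hedged, standalone sketch of that pattern — not Nova's actual implementation, with a placeholder state directory — looks like this:

```python
# Sketch of the "generate node identity once, then reuse it" behaviour.
# Path and function name are placeholders, not Nova's real code.
import os
import uuid


def get_or_create_node_identity(state_dir):
    """Return a stable per-node UUID, creating <state_dir>/compute_id on first run."""
    path = os.path.join(state_dir, 'compute_id')
    try:
        with open(path) as f:
            return f.read().strip()
    except FileNotFoundError:
        node_uuid = str(uuid.uuid4())
        os.makedirs(state_dir, exist_ok=True)
        with open(path, 'w') as f:
            f.write(node_uuid + '\n')
        return node_uuid


if __name__ == '__main__':
    print(get_or_create_node_identity('/tmp/n-cpu-example'))
```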
[ 653.931428] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.938075] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.938075] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.938075] env[68282]: DEBUG nova.compute.resource_tracker [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 653.938075] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd35f2e-64a6-465d-98cd-7aa118916f4a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.941381] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4b6842-6ed7-4525-829b-f358347d69aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.956094] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f750cfd3-439d-44cd-844b-2e8bc382870c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.962466] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed383d0-6bbb-4a82-971f-b6e615d72694 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.993165] env[68282]: DEBUG nova.compute.resource_tracker [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180953MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 653.993326] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.993518] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.008212] env[68282]: WARNING 
nova.compute.resource_tracker [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] No compute node record for cpu-1:1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e could not be found. [ 654.023229] env[68282]: INFO nova.compute.resource_tracker [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e [ 654.088136] env[68282]: DEBUG nova.compute.resource_tracker [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 654.088359] env[68282]: DEBUG nova.compute.resource_tracker [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 654.241749] env[68282]: INFO nova.scheduler.client.report [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] [req-2416ab0d-c690-40e8-bea7-44cc0e5b0b72] Created resource provider record via placement API for resource provider with UUID 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 654.262303] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d409d3cd-4d4d-4fe5-8a14-1513b4d8b7fd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.269638] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13326755-80d7-4843-a949-59224730604b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.298767] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e97025a5-919b-49de-ad38-b5c797643b3b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.305382] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf7cf19-a718-4623-9663-7a859ebd194d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.318390] env[68282]: DEBUG nova.compute.provider_tree [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Updating inventory in ProviderTree for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 654.391897] env[68282]: DEBUG nova.scheduler.client.report [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Updated inventory for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 654.392266] env[68282]: DEBUG nova.compute.provider_tree [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Updating resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e generation from 0 to 1 during operation: update_inventory {{(pid=68282) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 654.392464] env[68282]: DEBUG nova.compute.provider_tree [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Updating inventory in ProviderTree for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 654.483109] env[68282]: DEBUG nova.compute.provider_tree [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Updating resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e generation from 1 to 2 during operation: update_traits {{(pid=68282) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 654.516539] env[68282]: DEBUG nova.compute.resource_tracker [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 654.516877] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.523s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.517142] env[68282]: DEBUG nova.service [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Creating RPC server for service compute {{(pid=68282) start /opt/stack/nova/nova/service.py:182}} [ 654.537129] env[68282]: DEBUG nova.service [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] Join ServiceGroup membership for this service compute {{(pid=68282) start /opt/stack/nova/nova/service.py:199}} [ 654.537323] env[68282]: DEBUG nova.servicegroup.drivers.db [None req-f361a7d6-2325-4634-8317-3ecd0c9d1f47 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68282) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 663.641390] env[68282]: DEBUG dbcounter [-] [68282] Writing DB stats nova_cell1:SELECT=1 {{(pid=68282) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 663.642413] env[68282]: DEBUG dbcounter [-] [68282] Writing DB stats nova_cell0:SELECT=1 {{(pid=68282) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 683.541693] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running 
periodic task ComputeManager._sync_power_states {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 683.555694] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Getting list of instances from cluster (obj){ [ 683.555694] env[68282]: value = "domain-c8" [ 683.555694] env[68282]: _type = "ClusterComputeResource" [ 683.555694] env[68282]: } {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 683.556864] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30744358-0d6c-4374-93f0-de1e90bcae35 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.567019] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Got total of 0 instances {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 683.567019] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 683.567019] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Getting list of instances from cluster (obj){ [ 683.567019] env[68282]: value = "domain-c8" [ 683.567019] env[68282]: _type = "ClusterComputeResource" [ 683.567019] env[68282]: } {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 683.569752] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecacdfa0-a0e5-4e3c-b6c7-d45fa9bbe842 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.577757] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Got total of 0 instances {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 691.919094] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquiring lock "7423b2c7-699d-4c1b-82b9-683a2c08a261" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.919506] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Lock "7423b2c7-699d-4c1b-82b9-683a2c08a261" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.945711] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 692.078480] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.078754] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.080381] env[68282]: INFO nova.compute.claims [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.214232] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab51703-f7a5-4f75-b15b-60554e25bcdd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.222594] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914600b9-c396-4f77-84ce-96d8a2600498 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.258035] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be44805-7569-45a3-8079-c52cb64d0892 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.266600] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6708edb2-5c19-4acb-8795-cf9ba7dbdf29 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.283557] env[68282]: DEBUG nova.compute.provider_tree [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.298662] env[68282]: DEBUG nova.scheduler.client.report [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 692.319982] env[68282]: DEBUG 
oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.241s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.320585] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 692.368012] env[68282]: DEBUG nova.compute.utils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 692.372026] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 692.372026] env[68282]: DEBUG nova.network.neutron [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 692.388608] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 692.483150] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 695.522189] env[68282]: DEBUG nova.policy [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b28b2a167f744f59b0a54596a76ebf55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df7574d0e6b64156aab8a21257521ec7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 696.007242] env[68282]: DEBUG nova.network.neutron [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Successfully created port: ff1abd41-5680-443f-bc35-d7d1caa08147 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.309640] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 696.309913] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 696.310082] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.310288] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 696.310810] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 696.312017] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 696.312017] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 696.312017] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 696.312017] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 696.312017] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 696.312320] env[68282]: DEBUG nova.virt.hardware [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 696.313198] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30001cb0-5edf-4bb6-86da-2c32209d03cb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.323137] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338c4e98-1f08-427b-9103-680a5b63b9c6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.346176] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef139f8-18f9-4593-86f6-c4372a35fc2d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.759573] env[68282]: DEBUG nova.network.neutron [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Successfully updated port: ff1abd41-5680-443f-bc35-d7d1caa08147 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.779861] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 
tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquiring lock "refresh_cache-7423b2c7-699d-4c1b-82b9-683a2c08a261" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.780039] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquired lock "refresh_cache-7423b2c7-699d-4c1b-82b9-683a2c08a261" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.780200] env[68282]: DEBUG nova.network.neutron [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 697.852318] env[68282]: DEBUG nova.network.neutron [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.917859] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquiring lock "13b2d77c-448b-4558-b5ef-005064806213" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.917859] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Lock "13b2d77c-448b-4558-b5ef-005064806213" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.939871] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 698.024447] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.024789] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.028622] env[68282]: INFO nova.compute.claims [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 698.168032] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21412cfb-60e4-4a69-90da-8b310e5ed5cd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.183647] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766e2869-c8c1-48a4-be06-0e31c96a7d0f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.221538] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fb5c00-5294-4ea5-8e44-d311883f6c57 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.230478] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee90b677-ce8b-42c0-ad71-be64d4d02d1c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.247185] env[68282]: DEBUG nova.compute.provider_tree [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.259592] env[68282]: DEBUG nova.scheduler.client.report [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 698.281157] env[68282]: DEBUG oslo_concurrency.lockutils 
[None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.256s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.282026] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 698.331629] env[68282]: DEBUG nova.compute.utils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 698.333190] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Not allocating networking since 'none' was specified. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 698.347937] env[68282]: DEBUG nova.network.neutron [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Updating instance_info_cache with network_info: [{"id": "ff1abd41-5680-443f-bc35-d7d1caa08147", "address": "fa:16:3e:73:c3:8a", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1abd41-56", "ovs_interfaceid": "ff1abd41-5680-443f-bc35-d7d1caa08147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.350285] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Start building block device mappings for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 698.368210] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Releasing lock "refresh_cache-7423b2c7-699d-4c1b-82b9-683a2c08a261" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.368529] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Instance network_info: |[{"id": "ff1abd41-5680-443f-bc35-d7d1caa08147", "address": "fa:16:3e:73:c3:8a", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1abd41-56", "ovs_interfaceid": "ff1abd41-5680-443f-bc35-d7d1caa08147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 698.369114] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:c3:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a965790c-2d2f-4c2a-9ee7-745f4d53039b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff1abd41-5680-443f-bc35-d7d1caa08147', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 698.386733] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.387410] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39a2e156-b74e-494e-b000-4ca9407c9bf6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.402470] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Created folder: OpenStack in parent group-v4. 
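(Illustrative aside, not part of the log.) The "Creating folder: OpenStack / Project (...) / Instances" lines above show the driver building a three-level folder chain in vCenter through Folder.CreateFolder. A hypothetical sketch of that chain, assuming an already established oslo.vmware VMwareAPISession and a caller-supplied parent folder reference (both placeholders), is:

```python
# Sketch of the folder chain OpenStack -> "Project (<id>)" -> Instances.
# A production version would also catch the vSphere DuplicateName fault
# when a folder already exists; that handling is omitted here.
def ensure_instance_folder_chain(session, parent_vm_folder, project_id):
    """Create OpenStack/Project (<id>)/Instances under the datacenter VM folder."""
    vim = session.vim
    openstack_folder = session.invoke_api(
        vim, 'CreateFolder', parent_vm_folder, name='OpenStack')
    project_folder = session.invoke_api(
        vim, 'CreateFolder', openstack_folder, name='Project (%s)' % project_id)
    return session.invoke_api(
        vim, 'CreateFolder', project_folder, name='Instances')
```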
[ 698.403257] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Creating folder: Project (df7574d0e6b64156aab8a21257521ec7). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.407579] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b795517-ccc9-41c8-8d54-6d4edf6bf915 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.420124] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Created folder: Project (df7574d0e6b64156aab8a21257521ec7) in parent group-v693573. [ 698.420124] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Creating folder: Instances. Parent ref: group-v693574. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.420927] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6caa1dd2-d50a-4e6f-882f-c36b2e9f17a4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.435988] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Created folder: Instances in parent group-v693574. [ 698.435988] env[68282]: DEBUG oslo.service.loopingcall [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 698.435988] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 698.438446] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5a9c001-c165-4ad9-b192-ac12039767df {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.464132] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.464132] env[68282]: value = "task-3470417" [ 698.464132] env[68282]: _type = "Task" [ 698.464132] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.471106] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 698.481533] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470417, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.500937] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 698.501396] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 698.501570] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 698.501760] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 698.501922] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 698.502462] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 698.502722] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 698.502945] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
698.503073] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 698.503246] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 698.503423] env[68282]: DEBUG nova.virt.hardware [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 698.504366] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6943ce08-4150-414e-b0dd-7c6117e86023 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.513265] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdb71c7-c01b-461c-9575-08923996d7e3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.529783] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Instance VIF info [] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 698.536968] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Creating folder: Project (3ba26113109444e98f6641dd8e48dafc). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.537175] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8964815f-0ab9-4e62-a8a2-9d3919ec4446 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.548399] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Created folder: Project (3ba26113109444e98f6641dd8e48dafc) in parent group-v693573. [ 698.548600] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Creating folder: Instances. Parent ref: group-v693577. 
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 698.548857] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cdaa0310-4596-4963-b4d6-82dd3159272d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.557948] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Created folder: Instances in parent group-v693577. [ 698.558275] env[68282]: DEBUG oslo.service.loopingcall [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 698.558561] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 698.558797] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-58ed7f86-6b0c-4b7d-8ec4-d346de9c6aa8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.578346] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.578346] env[68282]: value = "task-3470420" [ 698.578346] env[68282]: _type = "Task" [ 698.578346] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.587323] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470420, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.977661] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470417, 'name': CreateVM_Task, 'duration_secs': 0.378254} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.978095] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 699.000766] env[68282]: DEBUG oslo_vmware.service [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569bac06-f297-4ac4-b834-d6036da345d8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.007248] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.007555] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.009520] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 699.009520] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cc6f1d6-3167-4d3c-a4c4-d2aaf5ce252b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.018129] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Waiting for the task: (returnval){ [ 699.018129] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]524e5973-757d-d34d-6762-da721e1362f3" [ 699.018129] env[68282]: _type = "Task" [ 699.018129] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.027613] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]524e5973-757d-d34d-6762-da721e1362f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.087106] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470420, 'name': CreateVM_Task, 'duration_secs': 0.323274} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.087637] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 699.087929] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.537879] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.538164] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 699.538420] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.538721] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.539136] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 699.539452] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.539760] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 699.539996] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f68dcca-8ed5-4e45-a8c3-56d4767dae8a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.542538] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8e97d6e-7db9-4fbc-a9f0-f6d47f676711 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.557482] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Waiting for the task: (returnval){ [ 699.557482] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52a42ebc-ce67-2041-202e-2c363dce9af4" [ 699.557482] env[68282]: _type = "Task" [ 699.557482] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.563642] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 699.563828] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 699.568258] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9df98e6-037a-4b58-8744-17e452a39d54 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.571887] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52a42ebc-ce67-2041-202e-2c363dce9af4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.578426] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b847466f-3b75-475e-b307-2d78222d0a87 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.585742] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Waiting for the task: (returnval){ [ 699.585742] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52f84b5b-f21d-2c37-d160-6815e19cab16" [ 699.585742] env[68282]: _type = "Task" [ 699.585742] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.593230] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52f84b5b-f21d-2c37-d160-6815e19cab16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.074594] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.074594] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 700.074853] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.096520] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 700.096520] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Creating directory with path [datastore2] vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.096520] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66c45711-ba7e-4528-91c7-ee084a8c9c11 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.118532] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Created directory with path [datastore2] vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.118616] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Fetch image to [datastore2] 
vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 700.118731] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 700.119650] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b644158-e9ce-445f-8350-6b86509f8490 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.127138] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f73b149-7bed-4487-b1f0-12020b0d9b47 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.139701] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5407d88e-c8cf-45ea-a970-b049a2a03da3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.171841] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9c4231-e861-4994-b66f-a7335850b391 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.181361] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e96dc9d9-8c8b-4a67-a277-fe0f64eac9c4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.207929] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 700.294545] env[68282]: DEBUG oslo_vmware.rw_handles [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 700.371576] env[68282]: DEBUG oslo_vmware.rw_handles [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Completed reading data from the image iterator. 
{{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 700.371576] env[68282]: DEBUG oslo_vmware.rw_handles [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 700.855357] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquiring lock "9cda9e61-a903-4156-b797-121d7142c021" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.857135] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Lock "9cda9e61-a903-4156-b797-121d7142c021" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.880916] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 700.972143] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.972143] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.974161] env[68282]: INFO nova.compute.claims [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.040209] env[68282]: DEBUG nova.compute.manager [req-c6aa6ef7-23b6-463d-b0e3-05aeddf7cfb1 req-12ffc6f5-a43a-4949-8e48-73bffdb49b9f service nova] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Received event network-vif-plugged-ff1abd41-5680-443f-bc35-d7d1caa08147 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 701.040523] env[68282]: DEBUG oslo_concurrency.lockutils [req-c6aa6ef7-23b6-463d-b0e3-05aeddf7cfb1 req-12ffc6f5-a43a-4949-8e48-73bffdb49b9f service nova] Acquiring lock "7423b2c7-699d-4c1b-82b9-683a2c08a261-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.040770] env[68282]: DEBUG oslo_concurrency.lockutils [req-c6aa6ef7-23b6-463d-b0e3-05aeddf7cfb1 req-12ffc6f5-a43a-4949-8e48-73bffdb49b9f service nova] Lock "7423b2c7-699d-4c1b-82b9-683a2c08a261-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.041101] env[68282]: DEBUG oslo_concurrency.lockutils [req-c6aa6ef7-23b6-463d-b0e3-05aeddf7cfb1 req-12ffc6f5-a43a-4949-8e48-73bffdb49b9f service nova] Lock "7423b2c7-699d-4c1b-82b9-683a2c08a261-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.044244] env[68282]: DEBUG nova.compute.manager [req-c6aa6ef7-23b6-463d-b0e3-05aeddf7cfb1 req-12ffc6f5-a43a-4949-8e48-73bffdb49b9f service nova] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] No waiting events found dispatching network-vif-plugged-ff1abd41-5680-443f-bc35-d7d1caa08147 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 701.044497] env[68282]: WARNING nova.compute.manager [req-c6aa6ef7-23b6-463d-b0e3-05aeddf7cfb1 req-12ffc6f5-a43a-4949-8e48-73bffdb49b9f service nova] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Received unexpected event network-vif-plugged-ff1abd41-5680-443f-bc35-d7d1caa08147 for instance with vm_state building and task_state spawning. 
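The recurring "Acquiring lock ... by ...", "Lock ... acquired ... waited N s" and "Lock ... released ... held N s" DEBUG entries in this trace are emitted by oslo.concurrency's lockutils wrappers around the named functions. As a rough, hedged sketch only (the lock names and the empty bodies below are placeholders borrowed from entries in this log, not Nova's actual source), the two patterns involved look like this:

# Sketch of the locking pattern behind the lockutils DEBUG lines above.
# Lock names and function bodies are placeholders; this is not Nova code.
from oslo_concurrency import lockutils

@lockutils.synchronized('9cda9e61-a903-4156-b797-121d7142c021')
def _locked_do_build_and_run_instance():
    # Runs while holding the per-instance lock; lockutils logs the
    # acquire/release (and the wait/hold durations) automatically.
    pass

def instance_claim():
    # The same mechanism as a context manager, e.g. the "compute_resources"
    # lock held around the resource tracker claim in the entries above.
    with lockutils.lock('compute_resources'):
        pass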
[ 701.217382] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquiring lock "167d0f09-4566-46f2-ab98-2acbc5810ce4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.217778] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Lock "167d0f09-4566-46f2-ab98-2acbc5810ce4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.222027] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d91df4-52c2-43b0-ab84-864edf36381e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.228241] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b142f16-77da-40a7-8c21-82cae1c79e19 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.263487] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99828985-c322-465d-b125-14380e2046c4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.266276] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 701.274693] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f0af41-51d9-4acf-9c24-6f4d9b755847 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.295123] env[68282]: DEBUG nova.compute.provider_tree [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.304951] env[68282]: DEBUG nova.scheduler.client.report [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 701.321021] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.348s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.321021] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 701.328820] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.329062] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.330792] env[68282]: INFO nova.compute.claims [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.368194] env[68282]: DEBUG nova.compute.utils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 701.370672] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 701.370997] env[68282]: DEBUG nova.network.neutron [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 701.385994] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 701.473066] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 701.496221] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a94e7e6-f200-4601-88e1-6ff2f6a669d2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.512160] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 701.514689] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 701.514689] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 701.514689] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 701.514689] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 701.514689] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 701.515384] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 701.515384] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 
tempest-MigrationsAdminTest-1164929110-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 701.515384] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 701.515384] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 701.515384] env[68282]: DEBUG nova.virt.hardware [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 701.515681] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06230f5e-3994-4b9c-bbd8-102067fd3004 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.521103] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4790a5b8-8131-4426-b6b2-fc2669b115c5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.559924] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13dfa840-b5f8-4e1e-92c3-f0f39cfe7388 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.563781] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e13fd6d-4c0e-4ade-8872-eaf6f10c84c0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.581893] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452d8492-ffb9-4772-8995-264fe3028465 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.595997] env[68282]: DEBUG nova.compute.provider_tree [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.610142] env[68282]: DEBUG nova.scheduler.client.report [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 
1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 701.635256] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.635868] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 701.697445] env[68282]: DEBUG nova.policy [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '22be670c90ff42c099b066bcc0ab4510', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '42c53ef5da2f492daecd4ce3168614a2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 701.706044] env[68282]: DEBUG nova.compute.utils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 701.708252] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 701.708252] env[68282]: DEBUG nova.network.neutron [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 701.721068] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 701.834670] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 701.872712] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 701.872712] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 701.872712] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 701.872888] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 701.875873] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 701.875873] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 701.875873] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 701.875873] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 701.875873] env[68282]: DEBUG nova.virt.hardware [None 
req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 701.876153] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 701.876153] env[68282]: DEBUG nova.virt.hardware [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 701.879454] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb33b71-f37f-42d2-89d4-67ebb65fa12e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.890502] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718690ce-a16f-4ed5-bd1a-4b1ceb0ac0a2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.190351] env[68282]: DEBUG nova.policy [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c5e305f7b1b46e6b5e875a98f5401cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58cecbfd7ad044659fae2b5ef452958d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 703.584133] env[68282]: DEBUG nova.network.neutron [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Successfully created port: fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.782542] env[68282]: DEBUG nova.network.neutron [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Successfully created port: 3dea2c23-c812-4b38-874a-5ddfbb995c73 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 704.011907] env[68282]: DEBUG nova.compute.manager [req-0ac8ffb4-ad03-4d9e-9053-658f07a0ab46 req-f9e93557-4a36-489a-a86f-d86847c0cb87 service nova] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Received event network-changed-ff1abd41-5680-443f-bc35-d7d1caa08147 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 704.012142] env[68282]: DEBUG nova.compute.manager [req-0ac8ffb4-ad03-4d9e-9053-658f07a0ab46 req-f9e93557-4a36-489a-a86f-d86847c0cb87 service nova] [instance: 
7423b2c7-699d-4c1b-82b9-683a2c08a261] Refreshing instance network info cache due to event network-changed-ff1abd41-5680-443f-bc35-d7d1caa08147. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 704.013015] env[68282]: DEBUG oslo_concurrency.lockutils [req-0ac8ffb4-ad03-4d9e-9053-658f07a0ab46 req-f9e93557-4a36-489a-a86f-d86847c0cb87 service nova] Acquiring lock "refresh_cache-7423b2c7-699d-4c1b-82b9-683a2c08a261" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.013015] env[68282]: DEBUG oslo_concurrency.lockutils [req-0ac8ffb4-ad03-4d9e-9053-658f07a0ab46 req-f9e93557-4a36-489a-a86f-d86847c0cb87 service nova] Acquired lock "refresh_cache-7423b2c7-699d-4c1b-82b9-683a2c08a261" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.013015] env[68282]: DEBUG nova.network.neutron [req-0ac8ffb4-ad03-4d9e-9053-658f07a0ab46 req-f9e93557-4a36-489a-a86f-d86847c0cb87 service nova] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Refreshing network info cache for port ff1abd41-5680-443f-bc35-d7d1caa08147 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 705.612551] env[68282]: DEBUG nova.network.neutron [req-0ac8ffb4-ad03-4d9e-9053-658f07a0ab46 req-f9e93557-4a36-489a-a86f-d86847c0cb87 service nova] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Updated VIF entry in instance network info cache for port ff1abd41-5680-443f-bc35-d7d1caa08147. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 705.614079] env[68282]: DEBUG nova.network.neutron [req-0ac8ffb4-ad03-4d9e-9053-658f07a0ab46 req-f9e93557-4a36-489a-a86f-d86847c0cb87 service nova] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Updating instance_info_cache with network_info: [{"id": "ff1abd41-5680-443f-bc35-d7d1caa08147", "address": "fa:16:3e:73:c3:8a", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.248", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1abd41-56", "ovs_interfaceid": "ff1abd41-5680-443f-bc35-d7d1caa08147", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.631467] env[68282]: DEBUG oslo_concurrency.lockutils [req-0ac8ffb4-ad03-4d9e-9053-658f07a0ab46 req-f9e93557-4a36-489a-a86f-d86847c0cb87 service nova] Releasing lock "refresh_cache-7423b2c7-699d-4c1b-82b9-683a2c08a261" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.557060] env[68282]: DEBUG nova.network.neutron [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 
tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Successfully updated port: 3dea2c23-c812-4b38-874a-5ddfbb995c73 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 706.570942] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquiring lock "refresh_cache-167d0f09-4566-46f2-ab98-2acbc5810ce4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.571110] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquired lock "refresh_cache-167d0f09-4566-46f2-ab98-2acbc5810ce4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.571501] env[68282]: DEBUG nova.network.neutron [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 706.719707] env[68282]: DEBUG nova.network.neutron [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.829282] env[68282]: DEBUG nova.network.neutron [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Successfully updated port: fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 706.842269] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquiring lock "refresh_cache-9cda9e61-a903-4156-b797-121d7142c021" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.842269] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquired lock "refresh_cache-9cda9e61-a903-4156-b797-121d7142c021" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.842402] env[68282]: DEBUG nova.network.neutron [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 707.009028] env[68282]: DEBUG nova.network.neutron [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Instance cache missing network 
info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 707.596757] env[68282]: DEBUG nova.network.neutron [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Updating instance_info_cache with network_info: [{"id": "3dea2c23-c812-4b38-874a-5ddfbb995c73", "address": "fa:16:3e:da:22:87", "network": {"id": "1a6d11b2-dc5e-447a-ac32-ca81f917fa6a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-381564456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58cecbfd7ad044659fae2b5ef452958d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359c2c31-99c4-41d7-a513-3bc4825897a0", "external-id": "nsx-vlan-transportzone-173", "segmentation_id": 173, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dea2c23-c8", "ovs_interfaceid": "3dea2c23-c812-4b38-874a-5ddfbb995c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.614153] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Releasing lock "refresh_cache-167d0f09-4566-46f2-ab98-2acbc5810ce4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.614225] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Instance network_info: |[{"id": "3dea2c23-c812-4b38-874a-5ddfbb995c73", "address": "fa:16:3e:da:22:87", "network": {"id": "1a6d11b2-dc5e-447a-ac32-ca81f917fa6a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-381564456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58cecbfd7ad044659fae2b5ef452958d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359c2c31-99c4-41d7-a513-3bc4825897a0", "external-id": "nsx-vlan-transportzone-173", "segmentation_id": 173, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dea2c23-c8", "ovs_interfaceid": "3dea2c23-c812-4b38-874a-5ddfbb995c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 707.614858] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:22:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359c2c31-99c4-41d7-a513-3bc4825897a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dea2c23-c812-4b38-874a-5ddfbb995c73', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.627065] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Creating folder: Project (58cecbfd7ad044659fae2b5ef452958d). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.628823] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc368578-97e0-48c9-b08e-07a8ada3355e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.642723] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Created folder: Project (58cecbfd7ad044659fae2b5ef452958d) in parent group-v693573. [ 707.642723] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Creating folder: Instances. Parent ref: group-v693580. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.642723] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba143918-5da4-4758-b692-ab54ff722b90 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.653527] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Created folder: Instances in parent group-v693580. [ 707.653527] env[68282]: DEBUG oslo.service.loopingcall [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.653527] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 707.653527] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c895a56a-0d36-40f5-9e2f-e73540d06b80 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.683468] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.683468] env[68282]: value = "task-3470423" [ 707.683468] env[68282]: _type = "Task" [ 707.683468] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.693616] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470423, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.809935] env[68282]: DEBUG nova.compute.manager [req-1d236049-97e9-407c-8213-25ff40abe57e req-772748fd-563c-4832-b96c-12d79dc0f9df service nova] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Received event network-vif-plugged-fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 707.810224] env[68282]: DEBUG oslo_concurrency.lockutils [req-1d236049-97e9-407c-8213-25ff40abe57e req-772748fd-563c-4832-b96c-12d79dc0f9df service nova] Acquiring lock "9cda9e61-a903-4156-b797-121d7142c021-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.810410] env[68282]: DEBUG oslo_concurrency.lockutils [req-1d236049-97e9-407c-8213-25ff40abe57e req-772748fd-563c-4832-b96c-12d79dc0f9df service nova] Lock "9cda9e61-a903-4156-b797-121d7142c021-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.810576] env[68282]: DEBUG oslo_concurrency.lockutils [req-1d236049-97e9-407c-8213-25ff40abe57e req-772748fd-563c-4832-b96c-12d79dc0f9df service nova] Lock "9cda9e61-a903-4156-b797-121d7142c021-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.810740] env[68282]: DEBUG nova.compute.manager [req-1d236049-97e9-407c-8213-25ff40abe57e req-772748fd-563c-4832-b96c-12d79dc0f9df service nova] [instance: 9cda9e61-a903-4156-b797-121d7142c021] No waiting events found dispatching network-vif-plugged-fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 707.810901] env[68282]: WARNING nova.compute.manager [req-1d236049-97e9-407c-8213-25ff40abe57e req-772748fd-563c-4832-b96c-12d79dc0f9df service nova] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Received unexpected event network-vif-plugged-fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f for instance with vm_state building and task_state spawning. 
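The CreateVM_Task entries above follow the wait_for_task / _poll_task pattern: the driver submits Folder.CreateVM_Task and then polls the task, logging "progress is 0%" until it completes. The sketch below is a minimal, hypothetical rendering of that loop; `get_task_info`, its attributes, and `TaskFailed` are illustrative stand-ins, not oslo.vmware's actual API.

```python
import time


class TaskFailed(Exception):
    """Raised when the (hypothetical) task reports an error state."""


def wait_for_task(get_task_info, interval=0.5, timeout=300):
    """Poll a task until it finishes, mirroring the wait_for_task/_poll_task
    pattern visible in the log above.

    ``get_task_info`` is a hypothetical callable returning an object with
    ``state`` ('running', 'success', 'error') and ``progress`` (0-100); it
    stands in for the vCenter TaskInfo lookups the real driver performs.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(getattr(info, 'error', 'task failed'))
        # The periodic "progress is N%" lines in the log come from this
        # kind of poll-and-log loop.
        print(f"progress is {info.progress}%")
        time.sleep(interval)
    raise TimeoutError(f"task did not complete within {timeout} seconds")
```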
[ 707.865298] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "3653a48c-6da3-488a-9b7c-b722032e71ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.865673] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "3653a48c-6da3-488a-9b7c-b722032e71ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.871730] env[68282]: DEBUG nova.network.neutron [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Updating instance_info_cache with network_info: [{"id": "fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f", "address": "fa:16:3e:42:89:7f", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa95f27d-c6", "ovs_interfaceid": "fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.891481] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 707.899286] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Releasing lock "refresh_cache-9cda9e61-a903-4156-b797-121d7142c021" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.899611] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Instance network_info: |[{"id": "fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f", "address": "fa:16:3e:42:89:7f", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa95f27d-c6", "ovs_interfaceid": "fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 707.900141] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:89:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a965790c-2d2f-4c2a-9ee7-745f4d53039b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.916598] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Creating folder: Project (42c53ef5da2f492daecd4ce3168614a2). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.917322] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-722df8b5-b03a-4999-8383-6529069d01bd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.932914] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Created folder: Project (42c53ef5da2f492daecd4ce3168614a2) in parent group-v693573. 
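For reference, the "Instance VIF info" entries above carry a plain Python structure that build_virtual_machine consumes. The values below are copied verbatim from the log line for instance 167d0f09-4566-46f2-ab98-2acbc5810ce4; only the comments are added interpretation.

```python
# VIF info as logged for instance 167d0f09-4566-46f2-ab98-2acbc5810ce4.
# The NSX logical switch is referenced as an OpaqueNetwork by external id,
# and the Neutron port UUID is reused as the interface id.
vif_info = [{
    'network_name': 'br-int',                    # integration bridge label
    'mac_address': 'fa:16:3e:da:22:87',          # fixed MAC from the Neutron port
    'network_ref': {
        'type': 'OpaqueNetwork',
        'network-id': '359c2c31-99c4-41d7-a513-3bc4825897a0',
        'network-type': 'nsx.LogicalSwitch',
        'use-external-id': True,
    },
    'iface_id': '3dea2c23-c812-4b38-874a-5ddfbb995c73',  # Neutron port UUID
    'vif_model': 'vmxnet3',                      # paravirtual NIC model
}]
```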
[ 707.933155] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Creating folder: Instances. Parent ref: group-v693583. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.936853] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e0cc6c9-79d5-4068-935a-862fa69be247 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.940167] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "9714bdd8-86ed-47eb-b703-efffe592aaf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.940440] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "9714bdd8-86ed-47eb-b703-efffe592aaf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.950351] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Created folder: Instances in parent group-v693583. [ 707.950795] env[68282]: DEBUG oslo.service.loopingcall [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.954021] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 707.954021] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52739c49-7b31-4e3c-842e-27a599567cf9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.972971] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 707.985017] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.985017] env[68282]: value = "task-3470426" [ 707.985017] env[68282]: _type = "Task" [ 707.985017] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.993962] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470426, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.033174] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.033517] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.035967] env[68282]: INFO nova.compute.claims [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.072889] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.199046] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470423, 'name': CreateVM_Task, 'duration_secs': 0.332429} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.201757] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 708.202680] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.202854] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.203197] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 708.203443] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb73eed4-4fee-441d-8566-f7e13c237f5a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.209122] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Waiting for the task: (returnval){ [ 708.209122] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ac62b6-049a-6639-635d-51a4355cc69f" [ 708.209122] env[68282]: _type = "Task" [ 708.209122] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.220192] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ac62b6-049a-6639-635d-51a4355cc69f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.273354] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efec36e5-8a74-43f2-9d99-790bc5b44f0f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.281869] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a3c3dc-7aa3-46b4-9df0-467b099da199 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.320955] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1540da0a-df88-49f4-aa77-5e60d554bcc0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.328912] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13030131-ad5a-4f21-a0e8-58017449f9f3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.343710] env[68282]: DEBUG nova.compute.provider_tree [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.357133] env[68282]: DEBUG nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 708.377078] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.377658] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 708.380922] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.315s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.383220] env[68282]: INFO nova.compute.claims [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.395965] env[68282]: DEBUG nova.compute.manager [req-1a77d408-c1c7-424c-a6fe-b1bff6c97e6c req-f557078e-1b57-40c1-958e-c791771032e3 service nova] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Received event network-vif-plugged-3dea2c23-c812-4b38-874a-5ddfbb995c73 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 708.395965] env[68282]: DEBUG oslo_concurrency.lockutils [req-1a77d408-c1c7-424c-a6fe-b1bff6c97e6c req-f557078e-1b57-40c1-958e-c791771032e3 service nova] Acquiring lock "167d0f09-4566-46f2-ab98-2acbc5810ce4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.395965] env[68282]: DEBUG oslo_concurrency.lockutils [req-1a77d408-c1c7-424c-a6fe-b1bff6c97e6c req-f557078e-1b57-40c1-958e-c791771032e3 service nova] Lock "167d0f09-4566-46f2-ab98-2acbc5810ce4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.396132] env[68282]: DEBUG oslo_concurrency.lockutils [req-1a77d408-c1c7-424c-a6fe-b1bff6c97e6c req-f557078e-1b57-40c1-958e-c791771032e3 service nova] Lock "167d0f09-4566-46f2-ab98-2acbc5810ce4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.396165] env[68282]: DEBUG nova.compute.manager [req-1a77d408-c1c7-424c-a6fe-b1bff6c97e6c req-f557078e-1b57-40c1-958e-c791771032e3 service nova] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] No waiting events found dispatching network-vif-plugged-3dea2c23-c812-4b38-874a-5ddfbb995c73 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 708.396321] env[68282]: WARNING nova.compute.manager [req-1a77d408-c1c7-424c-a6fe-b1bff6c97e6c req-f557078e-1b57-40c1-958e-c791771032e3 service nova] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Received unexpected event network-vif-plugged-3dea2c23-c812-4b38-874a-5ddfbb995c73 for instance with vm_state building and task_state spawning. 
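The repeated "Inventory has not changed" entries report the provider inventory that the claims above are made against. As a quick sketch, assuming the usual placement formula of (total - reserved) * allocation_ratio, the figures from the log translate into schedulable capacity as follows:

```python
# Provider inventory as reported in the log for 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}


def schedulable(inv):
    """Capacity available to claims: (total - reserved) * allocation_ratio."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}


print(schedulable(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```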
[ 708.438967] env[68282]: DEBUG nova.compute.utils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 708.448262] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 708.448262] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 708.462907] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 708.502981] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470426, 'name': CreateVM_Task, 'duration_secs': 0.348742} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.504641] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 708.506463] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.572638] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 708.606882] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.607929] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.607929] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.607929] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.607929] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.607929] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.609805] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.609904] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.610118] env[68282]: DEBUG nova.virt.hardware [None 
req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.610301] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.610483] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.612644] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bc78e8-caed-4c3e-9de0-f08f60a2a1a1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.627126] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3c6ef4-d0f1-46c1-88f6-ef57eab9c6ab {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.654917] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa63ebbe-b9fa-4349-99ac-0bd800f0f49b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.662427] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281b0586-3843-4662-a877-a992dab1ed0b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.700406] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb6d386-5cda-42a0-ac85-f160159f3ed6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.707560] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08fd21b-cd16-4c03-a39a-9d000b648912 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.725994] env[68282]: DEBUG nova.compute.provider_tree [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.730969] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.730969] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 
tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.731123] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.731848] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.731848] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 708.731997] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35f4baaf-533a-456b-bf1b-ee09a2ae4a98 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.736746] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Waiting for the task: (returnval){ [ 708.736746] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5263789b-770e-2b8c-f789-ba64e33bbce9" [ 708.736746] env[68282]: _type = "Task" [ 708.736746] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.738270] env[68282]: DEBUG nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 708.751981] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5263789b-770e-2b8c-f789-ba64e33bbce9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.763102] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.382s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.763672] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 708.826769] env[68282]: DEBUG nova.compute.utils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 708.831192] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 708.831192] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 708.849359] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 708.867912] env[68282]: DEBUG nova.policy [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c652ea8d576f4b0786cd60b4e33cb289', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96e012c6bb549879ca816a0ead25a59', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 708.947717] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 708.994226] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.994625] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.994701] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.994879] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.995057] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.995381] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.995500] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.995646] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.995829] env[68282]: DEBUG nova.virt.hardware [None 
req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.996017] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.996189] env[68282]: DEBUG nova.virt.hardware [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.997577] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56828b4e-fb8a-4010-ab93-a9c879412bec {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.009471] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f714471c-f340-45db-8158-c79ebdb1ac6c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.234171] env[68282]: DEBUG nova.policy [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c652ea8d576f4b0786cd60b4e33cb289', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96e012c6bb549879ca816a0ead25a59', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 709.252864] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.252864] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.252864] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.098358] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.098682] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.098869] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 710.098994] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 710.119930] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 710.120106] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 710.120249] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 710.120380] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 710.120509] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 710.120682] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 710.120783] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 710.121360] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.121631] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.121785] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.122201] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.122201] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.122359] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.122533] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 710.122687] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.147050] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.147050] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.147050] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.147050] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 710.149051] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80381bc4-640f-4d8c-9e98-497160214897 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.162592] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466185eb-d898-4777-84d5-8dc9d696f82f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.177663] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875b3fa9-08a2-41a3-a514-8a007df49533 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.185075] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7361a570-3fa1-40b4-aee4-694ec0fc73da {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.220904] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180950MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 710.220904] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.220904] 
env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.300924] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7423b2c7-699d-4c1b-82b9-683a2c08a261 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 710.301711] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 13b2d77c-448b-4558-b5ef-005064806213 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 710.302033] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9cda9e61-a903-4156-b797-121d7142c021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 710.302141] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 167d0f09-4566-46f2-ab98-2acbc5810ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 710.302258] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3653a48c-6da3-488a-9b7c-b722032e71ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 710.302320] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9714bdd8-86ed-47eb-b703-efffe592aaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 710.302560] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 710.302628] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 710.432027] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf269007-7244-45c0-a2d7-74b3e3c3b688 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.440607] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b8b310-c624-4fd3-88fd-36ecbea46502 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.472964] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b752428-e8cc-4b26-b55f-f9d322d9f6c5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.480661] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a877eb-8e90-47bb-8080-97a06e4df845 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.500760] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.514468] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 710.535653] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 710.535891] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.315s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.740883] env[68282]: DEBUG nova.network.neutron [None 
req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Successfully created port: e18d0525-c2ab-4a10-bb23-6c1ca64f762e {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.083544] env[68282]: DEBUG nova.compute.manager [req-6180d8b6-fe9e-40ef-8c71-58eee5ea7a39 req-81e77146-0fcd-4302-8028-12a57e44518b service nova] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Received event network-changed-fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 711.083778] env[68282]: DEBUG nova.compute.manager [req-6180d8b6-fe9e-40ef-8c71-58eee5ea7a39 req-81e77146-0fcd-4302-8028-12a57e44518b service nova] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Refreshing instance network info cache due to event network-changed-fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 711.083995] env[68282]: DEBUG oslo_concurrency.lockutils [req-6180d8b6-fe9e-40ef-8c71-58eee5ea7a39 req-81e77146-0fcd-4302-8028-12a57e44518b service nova] Acquiring lock "refresh_cache-9cda9e61-a903-4156-b797-121d7142c021" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.084151] env[68282]: DEBUG oslo_concurrency.lockutils [req-6180d8b6-fe9e-40ef-8c71-58eee5ea7a39 req-81e77146-0fcd-4302-8028-12a57e44518b service nova] Acquired lock "refresh_cache-9cda9e61-a903-4156-b797-121d7142c021" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.084320] env[68282]: DEBUG nova.network.neutron [req-6180d8b6-fe9e-40ef-8c71-58eee5ea7a39 req-81e77146-0fcd-4302-8028-12a57e44518b service nova] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Refreshing network info cache for port fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 711.291245] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Successfully created port: 29f8d80a-bc91-42c3-bd1c-675efe06cdc9 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.934642] env[68282]: DEBUG nova.compute.manager [req-04a84928-bf85-4f11-bb48-8f5e3ee04f67 req-574035e8-dd3a-44ce-8b54-c56db0d536d8 service nova] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Received event network-changed-3dea2c23-c812-4b38-874a-5ddfbb995c73 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 711.934801] env[68282]: DEBUG nova.compute.manager [req-04a84928-bf85-4f11-bb48-8f5e3ee04f67 req-574035e8-dd3a-44ce-8b54-c56db0d536d8 service nova] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Refreshing instance network info cache due to event network-changed-3dea2c23-c812-4b38-874a-5ddfbb995c73. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 711.935055] env[68282]: DEBUG oslo_concurrency.lockutils [req-04a84928-bf85-4f11-bb48-8f5e3ee04f67 req-574035e8-dd3a-44ce-8b54-c56db0d536d8 service nova] Acquiring lock "refresh_cache-167d0f09-4566-46f2-ab98-2acbc5810ce4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.935208] env[68282]: DEBUG oslo_concurrency.lockutils [req-04a84928-bf85-4f11-bb48-8f5e3ee04f67 req-574035e8-dd3a-44ce-8b54-c56db0d536d8 service nova] Acquired lock "refresh_cache-167d0f09-4566-46f2-ab98-2acbc5810ce4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.935367] env[68282]: DEBUG nova.network.neutron [req-04a84928-bf85-4f11-bb48-8f5e3ee04f67 req-574035e8-dd3a-44ce-8b54-c56db0d536d8 service nova] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Refreshing network info cache for port 3dea2c23-c812-4b38-874a-5ddfbb995c73 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 713.491315] env[68282]: DEBUG nova.network.neutron [req-6180d8b6-fe9e-40ef-8c71-58eee5ea7a39 req-81e77146-0fcd-4302-8028-12a57e44518b service nova] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Updated VIF entry in instance network info cache for port fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 713.491315] env[68282]: DEBUG nova.network.neutron [req-6180d8b6-fe9e-40ef-8c71-58eee5ea7a39 req-81e77146-0fcd-4302-8028-12a57e44518b service nova] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Updating instance_info_cache with network_info: [{"id": "fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f", "address": "fa:16:3e:42:89:7f", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa95f27d-c6", "ovs_interfaceid": "fa95f27d-c6ec-4958-9e24-bb02e0e0ce3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.505953] env[68282]: DEBUG oslo_concurrency.lockutils [req-6180d8b6-fe9e-40ef-8c71-58eee5ea7a39 req-81e77146-0fcd-4302-8028-12a57e44518b service nova] Releasing lock "refresh_cache-9cda9e61-a903-4156-b797-121d7142c021" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.031046] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquiring lock "c2cb0b72-896b-46c6-bb41-90cded35468b" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.031046] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "c2cb0b72-896b-46c6-bb41-90cded35468b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.051991] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 714.135562] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.135562] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.137473] env[68282]: INFO nova.compute.claims [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.433102] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c284ce02-3cd3-4341-af45-76781a4e749b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.443421] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a41742d-2de1-4ee8-8580-49398ebcf15b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.484567] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdb263a-cea2-44fb-b4f6-65b17e229f16 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.492841] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92499c13-0142-4b3a-9f39-ab6ca0ef5f00 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.506647] env[68282]: DEBUG nova.compute.provider_tree [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Inventory has not changed in ProviderTree for provider: 
1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.522049] env[68282]: DEBUG nova.scheduler.client.report [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 714.537266] env[68282]: DEBUG nova.network.neutron [req-04a84928-bf85-4f11-bb48-8f5e3ee04f67 req-574035e8-dd3a-44ce-8b54-c56db0d536d8 service nova] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Updated VIF entry in instance network info cache for port 3dea2c23-c812-4b38-874a-5ddfbb995c73. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 714.537601] env[68282]: DEBUG nova.network.neutron [req-04a84928-bf85-4f11-bb48-8f5e3ee04f67 req-574035e8-dd3a-44ce-8b54-c56db0d536d8 service nova] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Updating instance_info_cache with network_info: [{"id": "3dea2c23-c812-4b38-874a-5ddfbb995c73", "address": "fa:16:3e:da:22:87", "network": {"id": "1a6d11b2-dc5e-447a-ac32-ca81f917fa6a", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-381564456-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58cecbfd7ad044659fae2b5ef452958d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359c2c31-99c4-41d7-a513-3bc4825897a0", "external-id": "nsx-vlan-transportzone-173", "segmentation_id": 173, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dea2c23-c8", "ovs_interfaceid": "3dea2c23-c812-4b38-874a-5ddfbb995c73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.555222] env[68282]: DEBUG oslo_concurrency.lockutils [req-04a84928-bf85-4f11-bb48-8f5e3ee04f67 req-574035e8-dd3a-44ce-8b54-c56db0d536d8 service nova] Releasing lock "refresh_cache-167d0f09-4566-46f2-ab98-2acbc5810ce4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.555993] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.422s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.556372] env[68282]: 
DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 714.604027] env[68282]: DEBUG nova.compute.utils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 714.607293] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 714.607557] env[68282]: DEBUG nova.network.neutron [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 714.629394] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 714.718556] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 714.749336] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 714.749601] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 714.749815] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.750026] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 714.750185] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.750337] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 714.750551] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 714.750717] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 714.750916] env[68282]: DEBUG nova.virt.hardware [None 
req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 714.751120] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 714.751278] env[68282]: DEBUG nova.virt.hardware [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 714.752203] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f90108-999d-44a2-900a-0dd8debc026d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.760436] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc260f34-3630-4ff4-950b-42b037abba30 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.350886] env[68282]: DEBUG nova.policy [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ddbcaedd76546ee98b088d178ee220b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2740c3ed53574a59a0431ba2084f2952', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 715.757130] env[68282]: DEBUG nova.compute.manager [req-c073934f-076c-47d9-9605-e89e2895b3b5 req-a9a5d626-aad1-4189-8974-4fb15d263d19 service nova] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Received event network-vif-plugged-e18d0525-c2ab-4a10-bb23-6c1ca64f762e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 715.757387] env[68282]: DEBUG oslo_concurrency.lockutils [req-c073934f-076c-47d9-9605-e89e2895b3b5 req-a9a5d626-aad1-4189-8974-4fb15d263d19 service nova] Acquiring lock "3653a48c-6da3-488a-9b7c-b722032e71ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.757562] env[68282]: DEBUG oslo_concurrency.lockutils [req-c073934f-076c-47d9-9605-e89e2895b3b5 req-a9a5d626-aad1-4189-8974-4fb15d263d19 service nova] Lock "3653a48c-6da3-488a-9b7c-b722032e71ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.759090] env[68282]: DEBUG oslo_concurrency.lockutils [req-c073934f-076c-47d9-9605-e89e2895b3b5 req-a9a5d626-aad1-4189-8974-4fb15d263d19 service nova] 
Lock "3653a48c-6da3-488a-9b7c-b722032e71ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.759090] env[68282]: DEBUG nova.compute.manager [req-c073934f-076c-47d9-9605-e89e2895b3b5 req-a9a5d626-aad1-4189-8974-4fb15d263d19 service nova] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] No waiting events found dispatching network-vif-plugged-e18d0525-c2ab-4a10-bb23-6c1ca64f762e {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 715.759090] env[68282]: WARNING nova.compute.manager [req-c073934f-076c-47d9-9605-e89e2895b3b5 req-a9a5d626-aad1-4189-8974-4fb15d263d19 service nova] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Received unexpected event network-vif-plugged-e18d0525-c2ab-4a10-bb23-6c1ca64f762e for instance with vm_state building and task_state spawning. [ 716.495257] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Successfully updated port: 29f8d80a-bc91-42c3-bd1c-675efe06cdc9 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.514044] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "refresh_cache-9714bdd8-86ed-47eb-b703-efffe592aaf5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.514044] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquired lock "refresh_cache-9714bdd8-86ed-47eb-b703-efffe592aaf5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.514044] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 716.594922] env[68282]: DEBUG nova.compute.manager [req-4fb7c122-fa1f-480a-9635-c62b51ffcd6a req-51d1d074-051b-4d8b-98c4-c553dd94b781 service nova] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Received event network-vif-plugged-29f8d80a-bc91-42c3-bd1c-675efe06cdc9 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 716.595204] env[68282]: DEBUG oslo_concurrency.lockutils [req-4fb7c122-fa1f-480a-9635-c62b51ffcd6a req-51d1d074-051b-4d8b-98c4-c553dd94b781 service nova] Acquiring lock "9714bdd8-86ed-47eb-b703-efffe592aaf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.595461] env[68282]: DEBUG oslo_concurrency.lockutils [req-4fb7c122-fa1f-480a-9635-c62b51ffcd6a req-51d1d074-051b-4d8b-98c4-c553dd94b781 service nova] Lock "9714bdd8-86ed-47eb-b703-efffe592aaf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.595599] env[68282]: DEBUG oslo_concurrency.lockutils [req-4fb7c122-fa1f-480a-9635-c62b51ffcd6a req-51d1d074-051b-4d8b-98c4-c553dd94b781 service nova] Lock "9714bdd8-86ed-47eb-b703-efffe592aaf5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.595773] env[68282]: DEBUG nova.compute.manager [req-4fb7c122-fa1f-480a-9635-c62b51ffcd6a req-51d1d074-051b-4d8b-98c4-c553dd94b781 service nova] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] No waiting events found dispatching network-vif-plugged-29f8d80a-bc91-42c3-bd1c-675efe06cdc9 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 716.595941] env[68282]: WARNING nova.compute.manager [req-4fb7c122-fa1f-480a-9635-c62b51ffcd6a req-51d1d074-051b-4d8b-98c4-c553dd94b781 service nova] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Received unexpected event network-vif-plugged-29f8d80a-bc91-42c3-bd1c-675efe06cdc9 for instance with vm_state building and task_state spawning. [ 716.724296] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.724296] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Successfully updated port: e18d0525-c2ab-4a10-bb23-6c1ca64f762e {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.749908] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "refresh_cache-3653a48c-6da3-488a-9b7c-b722032e71ce" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.750065] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquired lock "refresh_cache-3653a48c-6da3-488a-9b7c-b722032e71ce" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.750214] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 717.112207] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.816559] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Updating instance_info_cache with network_info: [{"id": "29f8d80a-bc91-42c3-bd1c-675efe06cdc9", "address": "fa:16:3e:fa:8b:d7", "network": {"id": "8118198b-9090-4406-aad8-f2efc549d400", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1605841381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96e012c6bb549879ca816a0ead25a59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8d80a-bc", "ovs_interfaceid": "29f8d80a-bc91-42c3-bd1c-675efe06cdc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.833101] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Releasing lock "refresh_cache-9714bdd8-86ed-47eb-b703-efffe592aaf5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.833410] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Instance network_info: |[{"id": "29f8d80a-bc91-42c3-bd1c-675efe06cdc9", "address": "fa:16:3e:fa:8b:d7", "network": {"id": "8118198b-9090-4406-aad8-f2efc549d400", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1605841381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96e012c6bb549879ca816a0ead25a59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8d80a-bc", "ovs_interfaceid": "29f8d80a-bc91-42c3-bd1c-675efe06cdc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 717.833809] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:8b:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f246b87-f105-4b33-a71d-5caf8e99e074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29f8d80a-bc91-42c3-bd1c-675efe06cdc9', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 717.842357] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Creating folder: Project (e96e012c6bb549879ca816a0ead25a59). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 717.842936] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9cc9947-5ce2-44d4-a193-98e9f0076262 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.854543] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Created folder: Project (e96e012c6bb549879ca816a0ead25a59) in parent group-v693573. [ 717.854820] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Creating folder: Instances. Parent ref: group-v693586. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 717.855065] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2706cf96-1797-4474-9460-3e99c8cd81d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.864830] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Created folder: Instances in parent group-v693586. [ 717.864985] env[68282]: DEBUG oslo.service.loopingcall [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 717.865199] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 717.867019] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2586dca0-57ce-40f4-85a3-42b390ad92c7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.888573] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 717.888573] env[68282]: value = "task-3470429" [ 717.888573] env[68282]: _type = "Task" [ 717.888573] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.897412] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470429, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.416100] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470429, 'name': CreateVM_Task, 'duration_secs': 0.336029} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.416365] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 718.417726] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.418043] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.422212] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 718.422647] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f31fcd8c-54dc-4750-ac16-ca4898a73840 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.431301] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for the task: (returnval){ [ 718.431301] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52d047cd-415d-cd49-33bb-eaf0e79ffeee" [ 718.431301] env[68282]: _type = "Task" [ 718.431301] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.449588] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.450284] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.450720] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.123578] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Updating instance_info_cache with network_info: [{"id": "e18d0525-c2ab-4a10-bb23-6c1ca64f762e", "address": "fa:16:3e:44:f9:74", "network": {"id": "8118198b-9090-4406-aad8-f2efc549d400", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1605841381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96e012c6bb549879ca816a0ead25a59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape18d0525-c2", "ovs_interfaceid": "e18d0525-c2ab-4a10-bb23-6c1ca64f762e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.137597] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Releasing lock "refresh_cache-3653a48c-6da3-488a-9b7c-b722032e71ce" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.138724] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] 
Instance network_info: |[{"id": "e18d0525-c2ab-4a10-bb23-6c1ca64f762e", "address": "fa:16:3e:44:f9:74", "network": {"id": "8118198b-9090-4406-aad8-f2efc549d400", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1605841381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96e012c6bb549879ca816a0ead25a59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape18d0525-c2", "ovs_interfaceid": "e18d0525-c2ab-4a10-bb23-6c1ca64f762e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 719.139866] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:f9:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f246b87-f105-4b33-a71d-5caf8e99e074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e18d0525-c2ab-4a10-bb23-6c1ca64f762e', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 719.149546] env[68282]: DEBUG oslo.service.loopingcall [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.150417] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 719.150672] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2191096b-5f96-49a3-ae04-7ce405d4c83d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.176176] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.176176] env[68282]: value = "task-3470430" [ 719.176176] env[68282]: _type = "Task" [ 719.176176] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.185547] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470430, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.665636] env[68282]: DEBUG nova.network.neutron [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Successfully created port: 89c1ee11-ca11-4939-abdb-de2a22518367 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.694481] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470430, 'name': CreateVM_Task, 'duration_secs': 0.293551} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.694670] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 719.698054] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.698054] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.698054] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 719.698368] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9137162c-662f-4fda-b70d-b48f0c621713 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.704388] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for the task: (returnval){ [ 719.704388] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5270f4d4-5070-7a41-93e5-42a83e28456e" [ 719.704388] env[68282]: _type = "Task" [ 719.704388] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.717113] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5270f4d4-5070-7a41-93e5-42a83e28456e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.217126] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.217381] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.217597] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.543211] env[68282]: DEBUG nova.compute.manager [req-671e938a-d6b5-4546-b41c-7e138520f4ab req-4cb72c87-872b-4a3c-b5a6-51264a7dd580 service nova] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Received event network-changed-e18d0525-c2ab-4a10-bb23-6c1ca64f762e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 720.543211] env[68282]: DEBUG nova.compute.manager [req-671e938a-d6b5-4546-b41c-7e138520f4ab req-4cb72c87-872b-4a3c-b5a6-51264a7dd580 service nova] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Refreshing instance network info cache due to event network-changed-e18d0525-c2ab-4a10-bb23-6c1ca64f762e. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 720.543211] env[68282]: DEBUG oslo_concurrency.lockutils [req-671e938a-d6b5-4546-b41c-7e138520f4ab req-4cb72c87-872b-4a3c-b5a6-51264a7dd580 service nova] Acquiring lock "refresh_cache-3653a48c-6da3-488a-9b7c-b722032e71ce" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.545316] env[68282]: DEBUG oslo_concurrency.lockutils [req-671e938a-d6b5-4546-b41c-7e138520f4ab req-4cb72c87-872b-4a3c-b5a6-51264a7dd580 service nova] Acquired lock "refresh_cache-3653a48c-6da3-488a-9b7c-b722032e71ce" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.545733] env[68282]: DEBUG nova.network.neutron [req-671e938a-d6b5-4546-b41c-7e138520f4ab req-4cb72c87-872b-4a3c-b5a6-51264a7dd580 service nova] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Refreshing network info cache for port e18d0525-c2ab-4a10-bb23-6c1ca64f762e {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 720.681602] env[68282]: DEBUG nova.compute.manager [req-b3a1bfff-9ab4-4ffb-9724-80f481f5ec34 req-22e207db-3b79-46da-95e0-3abc373d58c3 service nova] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Received event network-changed-29f8d80a-bc91-42c3-bd1c-675efe06cdc9 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 720.681602] env[68282]: DEBUG nova.compute.manager [req-b3a1bfff-9ab4-4ffb-9724-80f481f5ec34 req-22e207db-3b79-46da-95e0-3abc373d58c3 service nova] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Refreshing instance network info cache due to event network-changed-29f8d80a-bc91-42c3-bd1c-675efe06cdc9. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 720.681602] env[68282]: DEBUG oslo_concurrency.lockutils [req-b3a1bfff-9ab4-4ffb-9724-80f481f5ec34 req-22e207db-3b79-46da-95e0-3abc373d58c3 service nova] Acquiring lock "refresh_cache-9714bdd8-86ed-47eb-b703-efffe592aaf5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.681602] env[68282]: DEBUG oslo_concurrency.lockutils [req-b3a1bfff-9ab4-4ffb-9724-80f481f5ec34 req-22e207db-3b79-46da-95e0-3abc373d58c3 service nova] Acquired lock "refresh_cache-9714bdd8-86ed-47eb-b703-efffe592aaf5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.681602] env[68282]: DEBUG nova.network.neutron [req-b3a1bfff-9ab4-4ffb-9724-80f481f5ec34 req-22e207db-3b79-46da-95e0-3abc373d58c3 service nova] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Refreshing network info cache for port 29f8d80a-bc91-42c3-bd1c-675efe06cdc9 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 720.998527] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquiring lock "25eddb82-c2b3-499f-afe0-5141b4624342" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.001111] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] 
Lock "25eddb82-c2b3-499f-afe0-5141b4624342" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.023545] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 721.142518] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.142829] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.144444] env[68282]: INFO nova.compute.claims [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.397488] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b93f1de-b054-4b26-9591-7f85f5ae55fb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.406031] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b28c4d8-aa2b-4f8d-b379-4b9843c15623 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.439457] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2b15e9-122a-49f7-9810-68c1b804d8cc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.449568] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9076c95d-f6e1-416a-a8aa-5c32363c2be3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.467732] env[68282]: DEBUG nova.compute.provider_tree [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.480048] env[68282]: DEBUG nova.scheduler.client.report [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Inventory 
has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 721.501243] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.358s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.501808] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 721.555194] env[68282]: DEBUG nova.compute.utils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.557966] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 721.558322] env[68282]: DEBUG nova.network.neutron [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 721.581179] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 721.669383] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 721.701027] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 721.701346] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 721.701519] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.701732] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 721.701885] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.702063] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 721.702314] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 721.702482] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 721.702954] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 721.703278] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 721.703474] env[68282]: DEBUG nova.virt.hardware [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 721.704723] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d6ee09-1517-410d-8eff-fa929da8d479 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.709383] env[68282]: DEBUG nova.policy [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf4121e54a45416093dd5c912870071c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77e1f75c16c540bfa0a8acede4b77c9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 721.717316] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2caa16-5cf6-45e6-81f2-a2231ec89602 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.589641] env[68282]: DEBUG nova.network.neutron [req-b3a1bfff-9ab4-4ffb-9724-80f481f5ec34 req-22e207db-3b79-46da-95e0-3abc373d58c3 service nova] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Updated VIF entry in instance network info cache for port 29f8d80a-bc91-42c3-bd1c-675efe06cdc9. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 722.589641] env[68282]: DEBUG nova.network.neutron [req-b3a1bfff-9ab4-4ffb-9724-80f481f5ec34 req-22e207db-3b79-46da-95e0-3abc373d58c3 service nova] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Updating instance_info_cache with network_info: [{"id": "29f8d80a-bc91-42c3-bd1c-675efe06cdc9", "address": "fa:16:3e:fa:8b:d7", "network": {"id": "8118198b-9090-4406-aad8-f2efc549d400", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1605841381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96e012c6bb549879ca816a0ead25a59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8d80a-bc", "ovs_interfaceid": "29f8d80a-bc91-42c3-bd1c-675efe06cdc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.607714] env[68282]: DEBUG oslo_concurrency.lockutils [req-b3a1bfff-9ab4-4ffb-9724-80f481f5ec34 req-22e207db-3b79-46da-95e0-3abc373d58c3 service nova] Releasing lock "refresh_cache-9714bdd8-86ed-47eb-b703-efffe592aaf5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.791041] env[68282]: DEBUG nova.network.neutron [req-671e938a-d6b5-4546-b41c-7e138520f4ab req-4cb72c87-872b-4a3c-b5a6-51264a7dd580 service nova] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Updated VIF entry in instance network info cache for port e18d0525-c2ab-4a10-bb23-6c1ca64f762e. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 722.791041] env[68282]: DEBUG nova.network.neutron [req-671e938a-d6b5-4546-b41c-7e138520f4ab req-4cb72c87-872b-4a3c-b5a6-51264a7dd580 service nova] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Updating instance_info_cache with network_info: [{"id": "e18d0525-c2ab-4a10-bb23-6c1ca64f762e", "address": "fa:16:3e:44:f9:74", "network": {"id": "8118198b-9090-4406-aad8-f2efc549d400", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1605841381-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96e012c6bb549879ca816a0ead25a59", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape18d0525-c2", "ovs_interfaceid": "e18d0525-c2ab-4a10-bb23-6c1ca64f762e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.803820] env[68282]: DEBUG oslo_concurrency.lockutils [req-671e938a-d6b5-4546-b41c-7e138520f4ab req-4cb72c87-872b-4a3c-b5a6-51264a7dd580 service nova] Releasing lock "refresh_cache-3653a48c-6da3-488a-9b7c-b722032e71ce" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.315026] env[68282]: DEBUG nova.network.neutron [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Successfully updated port: 89c1ee11-ca11-4939-abdb-de2a22518367 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.334212] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquiring lock "refresh_cache-c2cb0b72-896b-46c6-bb41-90cded35468b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.334212] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquired lock "refresh_cache-c2cb0b72-896b-46c6-bb41-90cded35468b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.334212] env[68282]: DEBUG nova.network.neutron [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 724.638522] env[68282]: DEBUG nova.network.neutron [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 
tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.659782] env[68282]: DEBUG nova.network.neutron [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Successfully created port: 4088792f-95ac-4b48-b3c3-3707a477cd88 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.088761] env[68282]: DEBUG nova.network.neutron [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Updating instance_info_cache with network_info: [{"id": "89c1ee11-ca11-4939-abdb-de2a22518367", "address": "fa:16:3e:4d:f7:c0", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c1ee11-ca", "ovs_interfaceid": "89c1ee11-ca11-4939-abdb-de2a22518367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.111877] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Releasing lock "refresh_cache-c2cb0b72-896b-46c6-bb41-90cded35468b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.113066] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Instance network_info: |[{"id": "89c1ee11-ca11-4939-abdb-de2a22518367", "address": "fa:16:3e:4d:f7:c0", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c1ee11-ca", "ovs_interfaceid": "89c1ee11-ca11-4939-abdb-de2a22518367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 725.114315] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:f7:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a965790c-2d2f-4c2a-9ee7-745f4d53039b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89c1ee11-ca11-4939-abdb-de2a22518367', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.129202] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Creating folder: Project (2740c3ed53574a59a0431ba2084f2952). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.129887] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24d02be7-ce94-45c4-9711-090245946a11 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.146151] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Created folder: Project (2740c3ed53574a59a0431ba2084f2952) in parent group-v693573. [ 725.146151] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Creating folder: Instances. Parent ref: group-v693590. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.146151] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-348bf6c2-dd65-4dee-9a02-e8461ffb2ee9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.157833] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Created folder: Instances in parent group-v693590. [ 725.157833] env[68282]: DEBUG oslo.service.loopingcall [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.157833] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 725.157833] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be9e9a1c-13e9-476f-999d-37c141b36dc5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.185061] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.185061] env[68282]: value = "task-3470433" [ 725.185061] env[68282]: _type = "Task" [ 725.185061] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.197856] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquiring lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.198257] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.202987] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470433, 'name': CreateVM_Task} progress is 6%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.218626] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 725.321111] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.321111] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.322368] env[68282]: INFO nova.compute.claims [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.544868] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7b6a6c-4437-4ad1-8796-5f544c65a2c6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.555336] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db01722c-57b9-4cbe-8f17-dc055116667a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.594059] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca1428b-00a4-44b2-89ee-497c1058669b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.601961] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3707510-73a1-4c6c-9e40-ae00ef00baee {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.616459] env[68282]: DEBUG nova.compute.provider_tree [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.623268] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "650fcdae-dc95-4191-9696-3b6f004bdb62" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.623268] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "650fcdae-dc95-4191-9696-3b6f004bdb62" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.634173] env[68282]: DEBUG nova.scheduler.client.report [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 725.637747] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 725.650942] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.329s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.650942] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 725.705022] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470433, 'name': CreateVM_Task, 'duration_secs': 0.31003} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.706612] env[68282]: DEBUG nova.compute.utils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.708771] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 725.709338] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Allocating IP information in the background. 
{{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 725.709590] env[68282]: DEBUG nova.network.neutron [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 725.711846] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.712015] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.715500] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 725.718053] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.720829] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.720829] env[68282]: INFO nova.compute.claims [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.724792] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc2ceeb2-68ff-4cf7-8aed-8b4c08788579 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.731431] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Start building block device mappings for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 725.736446] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Waiting for the task: (returnval){ [ 725.736446] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52fdcdb9-f968-324d-c2fd-62561d479caa" [ 725.736446] env[68282]: _type = "Task" [ 725.736446] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.750030] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52fdcdb9-f968-324d-c2fd-62561d479caa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.818580] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 725.855810] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:49:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='691085135',id=23,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-1829015011',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 725.856077] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 725.856243] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.857520] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 725.857520] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.857520] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 725.857520] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 725.859040] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 725.859289] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 725.859495] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 725.859698] env[68282]: DEBUG nova.virt.hardware [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 725.860999] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f68017-3ef1-49ed-8087-6a2376e13f13 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.873198] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ff5a02-6e34-4b78-b0fe-f1d3c2779e87 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.893979] env[68282]: DEBUG nova.policy [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f757ee0d471d4277a84ed6b70470fbe3', 'user_domain_id': 
'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d10b0f198bb480eb7d9d5c14f982556', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 725.981816] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0799d15b-0afd-451e-b0e5-cfacfbbc5c54 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.993304] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819a02c6-e8d3-4f4d-9fac-baafe81a0307 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.033910] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf449c8b-07aa-48bb-9877-8551254bdf94 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.041167] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027f5c07-9c2d-40e8-a8d2-a5bdbb6ecfb5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.056609] env[68282]: DEBUG nova.compute.provider_tree [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.070944] env[68282]: DEBUG nova.scheduler.client.report [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 726.092728] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.374s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.094171] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 726.169200] env[68282]: DEBUG nova.compute.utils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 726.173858] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Not allocating networking since 'none' was specified. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 726.196264] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 726.259572] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.262018] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.262018] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.323981] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 726.364027] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 726.364295] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 726.364448] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.365257] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 726.365257] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.365257] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 726.366246] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 726.367698] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 726.367698] env[68282]: DEBUG nova.virt.hardware [None 
req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 726.367698] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 726.367698] env[68282]: DEBUG nova.virt.hardware [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 726.367865] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576baeb7-7b24-494a-af25-d5aa7ff46a11 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.382155] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f143f72-b16d-45f5-b8bb-1329141b8bc3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.400391] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance VIF info [] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 726.406436] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Creating folder: Project (704e3849ff204fdd8c5e33c648168206). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 726.406793] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bdd8bb61-32eb-457d-a81e-2a74b9ae5f7c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.418726] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Created folder: Project (704e3849ff204fdd8c5e33c648168206) in parent group-v693573. [ 726.418926] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Creating folder: Instances. Parent ref: group-v693593. 
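The CPU-topology records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") reflect an enumeration of (sockets, cores, threads) combinations. Below is a simplified, hypothetical sketch of that enumeration, not the actual logic in nova/virt/hardware.py, which additionally applies flavor/image preferences and sorting.

```python
# Simplified sketch: list every (sockets, cores, threads) triple whose product
# equals the vCPU count and which fits under the maxima (65536 each above).
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            topologies.append((sockets, cores, threads))
    return topologies

# For the 1-vCPU m1.nano flavor this yields a single topology, matching the
# "Got 1 possible topologies" record.
print(possible_topologies(1))  # [(1, 1, 1)]
```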
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 726.420265] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d46989a5-cb9d-4ec2-9b91-b1e68a92faf8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.428754] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Created folder: Instances in parent group-v693593. [ 726.428754] env[68282]: DEBUG oslo.service.loopingcall [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 726.428754] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 726.428754] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d09a0fd-6808-48df-a852-aa889b111795 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.446567] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 726.446567] env[68282]: value = "task-3470436" [ 726.446567] env[68282]: _type = "Task" [ 726.446567] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.456769] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470436, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.615709] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquiring lock "42977331-21c5-4169-889f-37dfbb10b6ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.615961] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "42977331-21c5-4169-889f-37dfbb10b6ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.956604] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470436, 'name': CreateVM_Task, 'duration_secs': 0.290643} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.956844] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 726.957294] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.957565] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.958016] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 726.958086] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80548bf3-6639-4bfb-829c-e767e307e684 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.962917] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Waiting for the task: (returnval){ [ 726.962917] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ec1a45-532b-7a06-c141-ed2e21e529ba" [ 726.962917] env[68282]: _type = "Task" [ 726.962917] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.971773] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ec1a45-532b-7a06-c141-ed2e21e529ba, 'name': SearchDatastore_Task} progress is 0%. 
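The "Waiting for the task"/"progress is 0%"/"completed successfully" records above show a poll-until-done loop around vCenter tasks such as CreateVM_Task and SearchDatastore_Task. The sketch below illustrates that generic polling behaviour only; it is not oslo.vmware's implementation, and `get_task_info` is a hypothetical callable returning an object with `.state`, `.progress`, and `.error`.

```python
# Illustrative poll loop for the wait_for_task behaviour logged above.
import time

def wait_for_task(get_task_info, interval=0.5):
    while True:
        info = get_task_info()
        if info.state == "success":
            return info                     # "completed successfully"
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        print(f"progress is {info.progress}%")  # the "_poll_task" DEBUG lines
        time.sleep(interval)
```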
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.480019] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.480019] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.480019] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.530895] env[68282]: DEBUG nova.network.neutron [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Successfully created port: 998fd870-2f8c-49ea-bcad-ed47565c9d8c {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.021952] env[68282]: DEBUG nova.compute.manager [req-5e8af41f-22ba-4c11-a7cb-dbb4e780fa1d req-fb421bd2-8306-4019-9da7-aefada27dd33 service nova] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Received event network-vif-plugged-89c1ee11-ca11-4939-abdb-de2a22518367 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 728.024019] env[68282]: DEBUG oslo_concurrency.lockutils [req-5e8af41f-22ba-4c11-a7cb-dbb4e780fa1d req-fb421bd2-8306-4019-9da7-aefada27dd33 service nova] Acquiring lock "c2cb0b72-896b-46c6-bb41-90cded35468b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.024449] env[68282]: DEBUG oslo_concurrency.lockutils [req-5e8af41f-22ba-4c11-a7cb-dbb4e780fa1d req-fb421bd2-8306-4019-9da7-aefada27dd33 service nova] Lock "c2cb0b72-896b-46c6-bb41-90cded35468b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.024841] env[68282]: DEBUG oslo_concurrency.lockutils [req-5e8af41f-22ba-4c11-a7cb-dbb4e780fa1d req-fb421bd2-8306-4019-9da7-aefada27dd33 service nova] Lock "c2cb0b72-896b-46c6-bb41-90cded35468b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.024952] env[68282]: DEBUG nova.compute.manager [req-5e8af41f-22ba-4c11-a7cb-dbb4e780fa1d req-fb421bd2-8306-4019-9da7-aefada27dd33 service nova] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] No waiting events found 
dispatching network-vif-plugged-89c1ee11-ca11-4939-abdb-de2a22518367 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 728.025110] env[68282]: WARNING nova.compute.manager [req-5e8af41f-22ba-4c11-a7cb-dbb4e780fa1d req-fb421bd2-8306-4019-9da7-aefada27dd33 service nova] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Received unexpected event network-vif-plugged-89c1ee11-ca11-4939-abdb-de2a22518367 for instance with vm_state building and task_state spawning. [ 729.003104] env[68282]: DEBUG nova.network.neutron [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Successfully updated port: 4088792f-95ac-4b48-b3c3-3707a477cd88 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 729.024854] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquiring lock "refresh_cache-25eddb82-c2b3-499f-afe0-5141b4624342" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.024854] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquired lock "refresh_cache-25eddb82-c2b3-499f-afe0-5141b4624342" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.024854] env[68282]: DEBUG nova.network.neutron [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 729.182377] env[68282]: DEBUG nova.network.neutron [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Instance cache missing network info. 
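The neighbouring records ("Building network info cache for instance", "Instance cache missing network info.", later "Updating instance_info_cache with network_info") correspond to rebuilding the per-instance network info cache from Neutron ports. The sketch below is a hypothetical outline of that flow; `list_ports` and `save_cache` are assumed helpers, not Nova internals.

```python
# Hypothetical outline of the cache build logged here: the cache starts empty,
# ports are fetched from Neutron, and the result is written back as
# instance_info_cache.
def build_network_info_cache(instance_uuid, list_ports, save_cache):
    ports = list_ports(device_id=instance_uuid)      # Neutron port lookup
    network_info = [
        {"id": p["id"], "address": p["mac_address"], "active": p["status"] == "ACTIVE"}
        for p in ports
    ]
    save_cache(instance_uuid, network_info)          # "Updating instance_info_cache ..."
    return network_info
```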
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.356882] env[68282]: DEBUG nova.network.neutron [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Successfully updated port: 998fd870-2f8c-49ea-bcad-ed47565c9d8c {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 729.373331] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquiring lock "refresh_cache-97cb5db2-5a4a-4a17-afde-3af1c15ae733" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.373483] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquired lock "refresh_cache-97cb5db2-5a4a-4a17-afde-3af1c15ae733" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.373625] env[68282]: DEBUG nova.network.neutron [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 729.508718] env[68282]: DEBUG nova.network.neutron [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.069093] env[68282]: DEBUG nova.network.neutron [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Updating instance_info_cache with network_info: [{"id": "4088792f-95ac-4b48-b3c3-3707a477cd88", "address": "fa:16:3e:d9:89:eb", "network": {"id": "30070275-16c4-445e-af20-a67253cb2cae", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-271114781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77e1f75c16c540bfa0a8acede4b77c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4088792f-95", "ovs_interfaceid": "4088792f-95ac-4b48-b3c3-3707a477cd88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.093469] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Releasing lock "refresh_cache-25eddb82-c2b3-499f-afe0-5141b4624342" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.093786] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Instance network_info: |[{"id": "4088792f-95ac-4b48-b3c3-3707a477cd88", "address": "fa:16:3e:d9:89:eb", "network": {"id": "30070275-16c4-445e-af20-a67253cb2cae", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-271114781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77e1f75c16c540bfa0a8acede4b77c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4088792f-95", "ovs_interfaceid": "4088792f-95ac-4b48-b3c3-3707a477cd88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 730.094714] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:89:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4088792f-95ac-4b48-b3c3-3707a477cd88', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.105360] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Creating folder: Project (77e1f75c16c540bfa0a8acede4b77c9c). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.110119] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-197440df-1382-4632-aea1-b4ec8a88ce4d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.124593] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Created folder: Project (77e1f75c16c540bfa0a8acede4b77c9c) in parent group-v693573. [ 730.125107] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Creating folder: Instances. Parent ref: group-v693596. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.125107] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-382e23f3-18d3-4a1b-b703-e29800f11589 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.136708] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Created folder: Instances in parent group-v693596. [ 730.136708] env[68282]: DEBUG oslo.service.loopingcall [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
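The folder and VM creation records above ("Creating folder: Project (...)", "Creating folder: Instances", "Invoking Folder.CreateVM_Task", "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return") follow a fixed sequence. The outline below is a hedged sketch of that sequence with assumed `session.*` helpers; it is not the vmwareapi driver's code.

```python
# Hypothetical outline of the flow logged here: per-project folder, then an
# "Instances" folder beneath it, then CreateVM_Task, then wait for the result.
def build_vm(session, project_id, parent_ref, vm_config):
    project_folder = session.create_folder(f"Project ({project_id})", parent_ref)
    instances_folder = session.create_folder("Instances", project_folder)
    task = session.create_vm(instances_folder, vm_config)   # CreateVM_Task
    return session.wait_for_task(task)                      # polls until done
```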
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.136708] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 730.137973] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18e9cec7-da07-4016-987c-d485223d303a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.170207] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.170207] env[68282]: value = "task-3470439" [ 730.170207] env[68282]: _type = "Task" [ 730.170207] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.180569] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470439, 'name': CreateVM_Task} progress is 5%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.252321] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.252754] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.304302] env[68282]: DEBUG nova.network.neutron [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Updating instance_info_cache with network_info: [{"id": "998fd870-2f8c-49ea-bcad-ed47565c9d8c", "address": "fa:16:3e:50:e8:27", "network": {"id": "e7f59361-9f77-41b3-9d9a-5145e798e8dc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1069318635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d10b0f198bb480eb7d9d5c14f982556", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998fd870-2f", "ovs_interfaceid": "998fd870-2f8c-49ea-bcad-ed47565c9d8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.317035] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Releasing lock "refresh_cache-97cb5db2-5a4a-4a17-afde-3af1c15ae733" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.317199] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Instance network_info: |[{"id": "998fd870-2f8c-49ea-bcad-ed47565c9d8c", "address": "fa:16:3e:50:e8:27", "network": {"id": "e7f59361-9f77-41b3-9d9a-5145e798e8dc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1069318635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d10b0f198bb480eb7d9d5c14f982556", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998fd870-2f", "ovs_interfaceid": "998fd870-2f8c-49ea-bcad-ed47565c9d8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 730.317295] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:e8:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '998fd870-2f8c-49ea-bcad-ed47565c9d8c', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.327114] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Creating folder: Project (9d10b0f198bb480eb7d9d5c14f982556). Parent ref: group-v693573. 
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.327801] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5a80c5e-d2a7-45ac-a36a-eeb8124ec83e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.344152] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Created folder: Project (9d10b0f198bb480eb7d9d5c14f982556) in parent group-v693573. [ 730.344152] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Creating folder: Instances. Parent ref: group-v693599. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.344152] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6da292ef-3111-4012-b593-26968307115c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.354508] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Created folder: Instances in parent group-v693599. [ 730.354770] env[68282]: DEBUG oslo.service.loopingcall [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.354959] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 730.355180] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b231bcff-83c9-48b1-aaec-680898096758 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.376530] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.376530] env[68282]: value = "task-3470442" [ 730.376530] env[68282]: _type = "Task" [ 730.376530] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.385985] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470442, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.683788] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470439, 'name': CreateVM_Task, 'duration_secs': 0.322006} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.683788] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 730.684825] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.684935] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.685266] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 730.685574] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-669b71d3-6c08-4650-8b9f-bf10f43831b7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.691091] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Waiting for the task: (returnval){ [ 730.691091] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5220520b-8e9f-d0d4-6a41-618e081ba437" [ 730.691091] env[68282]: _type = "Task" [ 730.691091] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.699499] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5220520b-8e9f-d0d4-6a41-618e081ba437, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.890261] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470442, 'name': CreateVM_Task, 'duration_secs': 0.33422} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.890261] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 730.890261] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.211377] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.211802] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.211936] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.212272] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.213385] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 731.213723] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c92da82c-0622-4640-8c2c-4cb2ffc700f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.223821] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Waiting for the task: (returnval){ [ 731.223821] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52b71786-361e-8c62-14e4-6712421c3bb1" [ 731.223821] 
env[68282]: _type = "Task" [ 731.223821] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.242253] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52b71786-361e-8c62-14e4-6712421c3bb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.339592] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquiring lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.339846] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.681433] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquiring lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.681433] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.735269] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.735557] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.735829] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b 
tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.524804] env[68282]: DEBUG nova.compute.manager [req-6b984713-d75d-426d-a78c-d6bbba3fc2a9 req-22d74aa5-c1b3-4367-b2c1-da768a7150fe service nova] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Received event network-vif-plugged-4088792f-95ac-4b48-b3c3-3707a477cd88 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 732.525107] env[68282]: DEBUG oslo_concurrency.lockutils [req-6b984713-d75d-426d-a78c-d6bbba3fc2a9 req-22d74aa5-c1b3-4367-b2c1-da768a7150fe service nova] Acquiring lock "25eddb82-c2b3-499f-afe0-5141b4624342-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.525239] env[68282]: DEBUG oslo_concurrency.lockutils [req-6b984713-d75d-426d-a78c-d6bbba3fc2a9 req-22d74aa5-c1b3-4367-b2c1-da768a7150fe service nova] Lock "25eddb82-c2b3-499f-afe0-5141b4624342-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.525401] env[68282]: DEBUG oslo_concurrency.lockutils [req-6b984713-d75d-426d-a78c-d6bbba3fc2a9 req-22d74aa5-c1b3-4367-b2c1-da768a7150fe service nova] Lock "25eddb82-c2b3-499f-afe0-5141b4624342-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.525566] env[68282]: DEBUG nova.compute.manager [req-6b984713-d75d-426d-a78c-d6bbba3fc2a9 req-22d74aa5-c1b3-4367-b2c1-da768a7150fe service nova] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] No waiting events found dispatching network-vif-plugged-4088792f-95ac-4b48-b3c3-3707a477cd88 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 732.525737] env[68282]: WARNING nova.compute.manager [req-6b984713-d75d-426d-a78c-d6bbba3fc2a9 req-22d74aa5-c1b3-4367-b2c1-da768a7150fe service nova] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Received unexpected event network-vif-plugged-4088792f-95ac-4b48-b3c3-3707a477cd88 for instance with vm_state building and task_state spawning. 
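The event records above ("Acquiring lock ...-events", "No waiting events found dispatching network-vif-plugged-...", "WARNING ... Received unexpected event ...") show Nova popping a registered waiter for an external Neutron event and warning when none exists yet. The following is an illustrative sketch of that dispatch pattern only; the registry dict and waiter object are hypothetical, not Nova's InstanceEvents implementation.

```python
# Sketch of the external-event dispatch pattern behind these records.
import logging

LOG = logging.getLogger(__name__)
_waiters = {}  # (instance_uuid, event_name) -> waiter object

def dispatch_event(instance_uuid, event_name, vm_state, task_state):
    waiter = _waiters.pop((instance_uuid, event_name), None)
    if waiter is None:
        # Matches the WARNING above: the event arrived before anyone waited on it.
        LOG.warning("Received unexpected event %s for instance with vm_state %s "
                    "and task_state %s.", event_name, vm_state, task_state)
        return
    waiter.send(event_name)  # wake the thread blocked on this event
```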
[ 732.584320] env[68282]: DEBUG oslo_concurrency.lockutils [None req-744ae292-6e38-4398-adef-ae8782e07d64 tempest-TenantUsagesTestJSON-575767908 tempest-TenantUsagesTestJSON-575767908-project-member] Acquiring lock "81731456-9c86-4e6f-ae95-7b7455f322d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.585034] env[68282]: DEBUG oslo_concurrency.lockutils [None req-744ae292-6e38-4398-adef-ae8782e07d64 tempest-TenantUsagesTestJSON-575767908 tempest-TenantUsagesTestJSON-575767908-project-member] Lock "81731456-9c86-4e6f-ae95-7b7455f322d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.537427] env[68282]: DEBUG nova.compute.manager [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Received event network-changed-89c1ee11-ca11-4939-abdb-de2a22518367 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 734.537427] env[68282]: DEBUG nova.compute.manager [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Refreshing instance network info cache due to event network-changed-89c1ee11-ca11-4939-abdb-de2a22518367. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 734.537427] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Acquiring lock "refresh_cache-c2cb0b72-896b-46c6-bb41-90cded35468b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.537427] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Acquired lock "refresh_cache-c2cb0b72-896b-46c6-bb41-90cded35468b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.537427] env[68282]: DEBUG nova.network.neutron [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Refreshing network info cache for port 89c1ee11-ca11-4939-abdb-de2a22518367 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 735.409628] env[68282]: DEBUG nova.network.neutron [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Updated VIF entry in instance network info cache for port 89c1ee11-ca11-4939-abdb-de2a22518367. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 735.410019] env[68282]: DEBUG nova.network.neutron [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Updating instance_info_cache with network_info: [{"id": "89c1ee11-ca11-4939-abdb-de2a22518367", "address": "fa:16:3e:4d:f7:c0", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.205", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c1ee11-ca", "ovs_interfaceid": "89c1ee11-ca11-4939-abdb-de2a22518367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.424405] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Releasing lock "refresh_cache-c2cb0b72-896b-46c6-bb41-90cded35468b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.424405] env[68282]: DEBUG nova.compute.manager [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Received event network-vif-plugged-998fd870-2f8c-49ea-bcad-ed47565c9d8c {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 735.424405] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Acquiring lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.424405] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.424405] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.424405] env[68282]: DEBUG nova.compute.manager 
[req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] No waiting events found dispatching network-vif-plugged-998fd870-2f8c-49ea-bcad-ed47565c9d8c {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 735.424405] env[68282]: WARNING nova.compute.manager [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Received unexpected event network-vif-plugged-998fd870-2f8c-49ea-bcad-ed47565c9d8c for instance with vm_state building and task_state spawning. [ 735.424405] env[68282]: DEBUG nova.compute.manager [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Received event network-changed-998fd870-2f8c-49ea-bcad-ed47565c9d8c {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 735.424405] env[68282]: DEBUG nova.compute.manager [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Refreshing instance network info cache due to event network-changed-998fd870-2f8c-49ea-bcad-ed47565c9d8c. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 735.424405] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Acquiring lock "refresh_cache-97cb5db2-5a4a-4a17-afde-3af1c15ae733" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.424405] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Acquired lock "refresh_cache-97cb5db2-5a4a-4a17-afde-3af1c15ae733" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.424405] env[68282]: DEBUG nova.network.neutron [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Refreshing network info cache for port 998fd870-2f8c-49ea-bcad-ed47565c9d8c {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 736.108279] env[68282]: DEBUG oslo_concurrency.lockutils [None req-75666080-7980-4dd5-8d63-eea322118851 tempest-VolumesAssistedSnapshotsTest-988209846 tempest-VolumesAssistedSnapshotsTest-988209846-project-member] Acquiring lock "85d9f1dc-0d1e-44e0-92cc-7d7511acb786" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.108902] env[68282]: DEBUG oslo_concurrency.lockutils [None req-75666080-7980-4dd5-8d63-eea322118851 tempest-VolumesAssistedSnapshotsTest-988209846 tempest-VolumesAssistedSnapshotsTest-988209846-project-member] Lock "85d9f1dc-0d1e-44e0-92cc-7d7511acb786" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.225177] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b188c1ae-2041-4aaf-ab91-9d222af0405b tempest-ServerDiskConfigTestJSON-1050324068 
tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "9170a530-958f-46ec-a36c-d19baac1869c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.225407] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b188c1ae-2041-4aaf-ab91-9d222af0405b tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "9170a530-958f-46ec-a36c-d19baac1869c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.290758] env[68282]: DEBUG nova.network.neutron [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Updated VIF entry in instance network info cache for port 998fd870-2f8c-49ea-bcad-ed47565c9d8c. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 736.290854] env[68282]: DEBUG nova.network.neutron [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Updating instance_info_cache with network_info: [{"id": "998fd870-2f8c-49ea-bcad-ed47565c9d8c", "address": "fa:16:3e:50:e8:27", "network": {"id": "e7f59361-9f77-41b3-9d9a-5145e798e8dc", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1069318635-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9d10b0f198bb480eb7d9d5c14f982556", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap998fd870-2f", "ovs_interfaceid": "998fd870-2f8c-49ea-bcad-ed47565c9d8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.303936] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c9fed21-9246-41e2-ab3e-cd53ff563a2c req-9f95f606-0223-42c4-be3c-a7cf7ce64994 service nova] Releasing lock "refresh_cache-97cb5db2-5a4a-4a17-afde-3af1c15ae733" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.789118] env[68282]: DEBUG nova.compute.manager [req-8a6c75e3-1448-4244-bd70-e522225fb19a req-a5b3af63-1903-44a6-85fb-6e26bc3e0d78 service nova] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Received event network-changed-4088792f-95ac-4b48-b3c3-3707a477cd88 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 737.789118] env[68282]: DEBUG nova.compute.manager [req-8a6c75e3-1448-4244-bd70-e522225fb19a req-a5b3af63-1903-44a6-85fb-6e26bc3e0d78 service nova] 
[instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Refreshing instance network info cache due to event network-changed-4088792f-95ac-4b48-b3c3-3707a477cd88. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 737.789118] env[68282]: DEBUG oslo_concurrency.lockutils [req-8a6c75e3-1448-4244-bd70-e522225fb19a req-a5b3af63-1903-44a6-85fb-6e26bc3e0d78 service nova] Acquiring lock "refresh_cache-25eddb82-c2b3-499f-afe0-5141b4624342" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.789118] env[68282]: DEBUG oslo_concurrency.lockutils [req-8a6c75e3-1448-4244-bd70-e522225fb19a req-a5b3af63-1903-44a6-85fb-6e26bc3e0d78 service nova] Acquired lock "refresh_cache-25eddb82-c2b3-499f-afe0-5141b4624342" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.789118] env[68282]: DEBUG nova.network.neutron [req-8a6c75e3-1448-4244-bd70-e522225fb19a req-a5b3af63-1903-44a6-85fb-6e26bc3e0d78 service nova] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Refreshing network info cache for port 4088792f-95ac-4b48-b3c3-3707a477cd88 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 738.128380] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9c279fec-af52-481d-8ca4-31f301dd3e87 tempest-ServersV294TestFqdnHostnames-1599076239 tempest-ServersV294TestFqdnHostnames-1599076239-project-member] Acquiring lock "a4fed27d-e797-42d1-b738-8f24ebd708ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.128634] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9c279fec-af52-481d-8ca4-31f301dd3e87 tempest-ServersV294TestFqdnHostnames-1599076239 tempest-ServersV294TestFqdnHostnames-1599076239-project-member] Lock "a4fed27d-e797-42d1-b738-8f24ebd708ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.753855] env[68282]: DEBUG nova.network.neutron [req-8a6c75e3-1448-4244-bd70-e522225fb19a req-a5b3af63-1903-44a6-85fb-6e26bc3e0d78 service nova] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Updated VIF entry in instance network info cache for port 4088792f-95ac-4b48-b3c3-3707a477cd88. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 738.753855] env[68282]: DEBUG nova.network.neutron [req-8a6c75e3-1448-4244-bd70-e522225fb19a req-a5b3af63-1903-44a6-85fb-6e26bc3e0d78 service nova] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Updating instance_info_cache with network_info: [{"id": "4088792f-95ac-4b48-b3c3-3707a477cd88", "address": "fa:16:3e:d9:89:eb", "network": {"id": "30070275-16c4-445e-af20-a67253cb2cae", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-271114781-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77e1f75c16c540bfa0a8acede4b77c9c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4088792f-95", "ovs_interfaceid": "4088792f-95ac-4b48-b3c3-3707a477cd88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.765703] env[68282]: DEBUG oslo_concurrency.lockutils [req-8a6c75e3-1448-4244-bd70-e522225fb19a req-a5b3af63-1903-44a6-85fb-6e26bc3e0d78 service nova] Releasing lock "refresh_cache-25eddb82-c2b3-499f-afe0-5141b4624342" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.389729] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b292cb6a-f0f5-452b-97c8-7f88e0bc1cee tempest-InstanceActionsV221TestJSON-1450215027 tempest-InstanceActionsV221TestJSON-1450215027-project-member] Acquiring lock "157e19df-fe8f-4287-9c5a-03eefdf05aa4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.391067] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b292cb6a-f0f5-452b-97c8-7f88e0bc1cee tempest-InstanceActionsV221TestJSON-1450215027 tempest-InstanceActionsV221TestJSON-1450215027-project-member] Lock "157e19df-fe8f-4287-9c5a-03eefdf05aa4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.031922] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2b383ce1-0e36-4c76-be3b-69f570caf383 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "8be63333-3269-4158-8476-a3032a185131" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.032384] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2b383ce1-0e36-4c76-be3b-69f570caf383 tempest-DeleteServersTestJSON-1799391927 
tempest-DeleteServersTestJSON-1799391927-project-member] Lock "8be63333-3269-4158-8476-a3032a185131" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.860804] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2c6175bb-481f-4ccb-a63e-45147280e2dc tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] Acquiring lock "35669e1a-cb77-42be-9e1d-7300ea872d5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.861090] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2c6175bb-481f-4ccb-a63e-45147280e2dc tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] Lock "35669e1a-cb77-42be-9e1d-7300ea872d5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.208730] env[68282]: WARNING oslo_vmware.rw_handles [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 746.208730] env[68282]: ERROR oslo_vmware.rw_handles [ 746.209442] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 746.212817] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 
7423b2c7-699d-4c1b-82b9-683a2c08a261] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 746.212817] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Copying Virtual Disk [datastore2] vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/0b161ab0-8398-41cf-99ca-2c7c141d3918/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 746.212817] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-187c76a2-397f-4532-bf22-20e70f12dd39 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.223109] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Waiting for the task: (returnval){ [ 746.223109] env[68282]: value = "task-3470443" [ 746.223109] env[68282]: _type = "Task" [ 746.223109] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.229824] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Task: {'id': task-3470443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.735454] env[68282]: DEBUG oslo_vmware.exceptions [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 746.735454] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.740920] env[68282]: ERROR nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 746.740920] env[68282]: Faults: ['InvalidArgument'] [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Traceback (most recent call last): [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] yield resources [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] self.driver.spawn(context, instance, image_meta, [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] self._vmops.spawn(context, instance, image_meta, injected_files, [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] self._fetch_image_if_missing(context, vi) [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] image_cache(vi, tmp_image_ds_loc) [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] vm_util.copy_virtual_disk( [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] session._wait_for_task(vmdk_copy_task) [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] return self.wait_for_task(task_ref) [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] return evt.wait() [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] result = hub.switch() [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] return self.greenlet.switch() [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] self.f(*self.args, **self.kw) [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] raise exceptions.translate_fault(task_info.error) [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Faults: ['InvalidArgument'] [ 746.740920] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] [ 746.744181] env[68282]: INFO nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Terminating instance [ 746.744181] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.744494] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.745692] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 
tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 746.745692] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 746.745863] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7ef99a4-6f7d-4654-b499-95d34394390e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.750930] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6b9678-a8f4-464d-b5b0-c11a4f013f6a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.763355] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 746.764888] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-793a65f9-9f68-48cb-8820-576427df2fc1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.768802] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.768981] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 746.769683] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60a8afbc-3f04-46d0-b652-8c32653cef59 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.776957] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Waiting for the task: (returnval){ [ 746.776957] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]525e40be-29d5-e0b3-1a45-788d2fddc348" [ 746.776957] env[68282]: _type = "Task" [ 746.776957] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.785858] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]525e40be-29d5-e0b3-1a45-788d2fddc348, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.837347] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 746.837548] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 746.837728] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Deleting the datastore file [datastore2] 7423b2c7-699d-4c1b-82b9-683a2c08a261 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.838189] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-023191b7-7d2b-4f39-81e6-86d85f0e4f07 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.844980] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Waiting for the task: (returnval){ [ 746.844980] env[68282]: value = "task-3470445" [ 746.844980] env[68282]: _type = "Task" [ 746.844980] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.854823] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Task: {'id': task-3470445, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.290034] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 747.290337] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Creating directory with path [datastore2] vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.290588] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aca05e43-eff9-4ca6-9fa3-ea3b1e1b1b6e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.306674] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Created directory with path [datastore2] vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.306883] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Fetch image to [datastore2] vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 747.309152] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 747.310061] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b221f7b8-ec27-4204-880b-e2314373907b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.318469] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90deb57c-82cc-4f8d-bf54-c051fa8f5a47 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.329615] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6eb834-938a-44f0-bbd5-05a90f34557e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.369084] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-74c03643-fdb3-41f4-9a52-5aab63fab670 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.380776] env[68282]: DEBUG oslo_vmware.api [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Task: {'id': task-3470445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081771} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.380776] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 747.380776] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 747.380776] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 747.380776] env[68282]: INFO nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Took 0.63 seconds to destroy the instance on the hypervisor. 
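The paired lock messages in the surrounding entries ("Acquiring lock ... by ..." / "Lock ... acquired ... :: waited" / "Lock ... released ... :: held" from lockutils.py:402/407/421, and "Acquiring lock refresh_cache-..." / "Acquired lock" / "Releasing lock" from lockutils.py:310/313/331) are emitted by oslo.concurrency's lockutils. The following is a minimal illustrative sketch, not code from Nova, showing the two calling patterns that produce those lines; the names abort_claim_example, refresh_cache_example, tracker and refresh are hypothetical placeholders.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_claim_example(tracker, instance_uuid):
        # The synchronized decorator's wrapper logs the acquire (with the time
        # waited) before calling this function and the release (with the time
        # held) after it returns, matching the lockutils.py:402/407/421 lines.
        tracker.pop(instance_uuid, None)

    def refresh_cache_example(instance_uuid, refresh):
        # The lock() context manager logs "Acquiring lock" / "Acquired lock" on
        # entry and "Releasing lock" on exit, matching lockutils.py:310/313/331.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            return refresh(instance_uuid)

In both patterns the lock name is just a string key ("compute_resources", the instance UUID, or "refresh_cache-<uuid>" in the entries above), so the waited/held durations logged here directly measure contention on that key within this nova-compute process.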
[ 747.381696] env[68282]: DEBUG nova.compute.claims [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 747.381877] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.382105] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.387585] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f3bea3f2-eb62-405b-a394-c30787175fbb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.417847] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 747.515045] env[68282]: DEBUG oslo_vmware.rw_handles [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 747.586921] env[68282]: DEBUG oslo_vmware.rw_handles [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 747.586921] env[68282]: DEBUG oslo_vmware.rw_handles [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 747.901944] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b6c63d-fc02-4bbc-bcf6-c5cd2c92bbbb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.909891] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b1e2fa-af07-4114-aeb9-b4d5ecb84fcc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.950429] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d60441-0446-4e56-af8b-57c725f03d41 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.959021] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21480a51-4329-47c1-a635-004f79ed4e48 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.972836] env[68282]: DEBUG nova.compute.provider_tree [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.985326] env[68282]: DEBUG nova.scheduler.client.report [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 748.015524] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.633s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.016160] env[68282]: ERROR nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 748.016160] env[68282]: Faults: ['InvalidArgument'] [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Traceback (most recent call last): [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 748.016160] 
env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] self.driver.spawn(context, instance, image_meta, [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] self._fetch_image_if_missing(context, vi) [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] image_cache(vi, tmp_image_ds_loc) [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] vm_util.copy_virtual_disk( [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] session._wait_for_task(vmdk_copy_task) [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] return self.wait_for_task(task_ref) [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] return evt.wait() [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] result = hub.switch() [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] return self.greenlet.switch() [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] self.f(*self.args, **self.kw) [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] raise exceptions.translate_fault(task_info.error) [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Faults: ['InvalidArgument'] [ 748.016160] env[68282]: ERROR nova.compute.manager [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] [ 748.017308] env[68282]: DEBUG nova.compute.utils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 748.020486] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Build of instance 7423b2c7-699d-4c1b-82b9-683a2c08a261 was re-scheduled: A specified parameter was not correct: fileType [ 748.020486] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 748.021211] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 748.021211] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 748.022112] env[68282]: DEBUG nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 748.022112] env[68282]: DEBUG nova.network.neutron [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 748.441382] env[68282]: DEBUG nova.network.neutron [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.456286] env[68282]: INFO nova.compute.manager [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: 7423b2c7-699d-4c1b-82b9-683a2c08a261] Took 0.43 seconds to deallocate network for instance. [ 748.569696] env[68282]: INFO nova.scheduler.client.report [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Deleted allocations for instance 7423b2c7-699d-4c1b-82b9-683a2c08a261 [ 748.608552] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b0a55f61-b910-47e8-848c-8c685a908eb5 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Lock "7423b2c7-699d-4c1b-82b9-683a2c08a261" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.689s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.638536] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 748.712233] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.715226] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.715226] env[68282]: INFO nova.compute.claims [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.126573] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c299c53-2fbc-4df0-bdbd-caa79a17ffb1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.136128] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238abc44-b8b5-4def-be5b-a286fb400e6a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.175408] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccd5cef-7ad6-4278-8ea4-0aac49318b27 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.184149] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0939d3b-01fd-4c0e-bbc3-d46b34e9f55c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.199494] env[68282]: DEBUG nova.compute.provider_tree [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.212232] env[68282]: DEBUG nova.scheduler.client.report [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 749.237345] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "50234924-2933-4a79-9a33-3cb968b6e08a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.237734] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "50234924-2933-4a79-9a33-3cb968b6e08a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.238772] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.526s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.238929] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 749.281034] env[68282]: DEBUG nova.compute.utils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 749.282654] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 749.283028] env[68282]: DEBUG nova.network.neutron [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 749.295231] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Start building block device mappings for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 749.354479] env[68282]: DEBUG nova.policy [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc8faa87fd10439f976feb9bc90c433d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd9628e9488840a98ad37af2dfd7146f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 749.381468] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 749.411155] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.411692] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.411927] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.412198] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.412453] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 
tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.412676] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.412955] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.413195] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.413438] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.413668] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.413905] env[68282]: DEBUG nova.virt.hardware [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.414864] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aee4431-cbba-467e-8dbe-71754de1c22b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.424901] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19ab036-303a-4bc9-8250-8d8d362b2384 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.795120] env[68282]: DEBUG nova.network.neutron [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Successfully created port: 26754f76-ea30-48d7-abea-700c8e50e476 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.481754] env[68282]: DEBUG nova.network.neutron [None 
req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Successfully updated port: 26754f76-ea30-48d7-abea-700c8e50e476 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 750.499304] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquiring lock "refresh_cache-42977331-21c5-4169-889f-37dfbb10b6ef" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.499304] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquired lock "refresh_cache-42977331-21c5-4169-889f-37dfbb10b6ef" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.499304] env[68282]: DEBUG nova.network.neutron [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 750.505482] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2fca969e-fbb6-49c3-8caa-20f7681745da tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "ada46260-9977-4009-a4f1-c08f2222b6e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.505634] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2fca969e-fbb6-49c3-8caa-20f7681745da tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "ada46260-9977-4009-a4f1-c08f2222b6e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.544803] env[68282]: DEBUG nova.network.neutron [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 750.723845] env[68282]: DEBUG nova.network.neutron [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Updating instance_info_cache with network_info: [{"id": "26754f76-ea30-48d7-abea-700c8e50e476", "address": "fa:16:3e:9c:d8:93", "network": {"id": "52f212dd-7298-4856-9651-c4c0766b686e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-120485675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9628e9488840a98ad37af2dfd7146f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73e099e8-2acc-4628-a60d-0b4afa46b39d", "external-id": "nsx-vlan-transportzone-767", "segmentation_id": 767, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26754f76-ea", "ovs_interfaceid": "26754f76-ea30-48d7-abea-700c8e50e476", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.735564] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Releasing lock "refresh_cache-42977331-21c5-4169-889f-37dfbb10b6ef" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.735970] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Instance network_info: |[{"id": "26754f76-ea30-48d7-abea-700c8e50e476", "address": "fa:16:3e:9c:d8:93", "network": {"id": "52f212dd-7298-4856-9651-c4c0766b686e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-120485675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9628e9488840a98ad37af2dfd7146f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73e099e8-2acc-4628-a60d-0b4afa46b39d", "external-id": "nsx-vlan-transportzone-767", "segmentation_id": 767, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26754f76-ea", "ovs_interfaceid": "26754f76-ea30-48d7-abea-700c8e50e476", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 750.736840] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:d8:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73e099e8-2acc-4628-a60d-0b4afa46b39d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26754f76-ea30-48d7-abea-700c8e50e476', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.744199] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Creating folder: Project (dd9628e9488840a98ad37af2dfd7146f). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 750.744679] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e852b4a-1988-4f84-9f4d-21fc4eed0067 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.754643] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Created folder: Project (dd9628e9488840a98ad37af2dfd7146f) in parent group-v693573. [ 750.754822] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Creating folder: Instances. Parent ref: group-v693605. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 750.755046] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6d7e37e-d830-4def-8eb2-f0d8f6ce518d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.763493] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Created folder: Instances in parent group-v693605. [ 750.763715] env[68282]: DEBUG oslo.service.loopingcall [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.764580] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 750.764801] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c702c334-9d05-4adf-aaae-37a14f955503 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.784372] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.784372] env[68282]: value = "task-3470452" [ 750.784372] env[68282]: _type = "Task" [ 750.784372] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.791698] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470452, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.302358] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470452, 'name': CreateVM_Task, 'duration_secs': 0.284088} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.302610] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 751.303319] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.303500] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.303828] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 751.304121] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2af5fa1b-bd7c-42f4-9a06-579202ce1116 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.312023] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Waiting for the task: (returnval){ [ 751.312023] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ddc177-f980-f61f-66ca-765e9c2ce231" [ 
751.312023] env[68282]: _type = "Task" [ 751.312023] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.318331] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ddc177-f980-f61f-66ca-765e9c2ce231, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.821233] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.821233] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.821374] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.492125] env[68282]: DEBUG nova.compute.manager [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Received event network-vif-plugged-26754f76-ea30-48d7-abea-700c8e50e476 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 753.492367] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] Acquiring lock "42977331-21c5-4169-889f-37dfbb10b6ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.492553] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] Lock "42977331-21c5-4169-889f-37dfbb10b6ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.492717] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] Lock "42977331-21c5-4169-889f-37dfbb10b6ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.492880] env[68282]: DEBUG nova.compute.manager [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] No waiting events found dispatching network-vif-plugged-26754f76-ea30-48d7-abea-700c8e50e476 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 753.494179] env[68282]: WARNING nova.compute.manager [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Received unexpected event network-vif-plugged-26754f76-ea30-48d7-abea-700c8e50e476 for instance with vm_state building and task_state spawning. [ 753.494562] env[68282]: DEBUG nova.compute.manager [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Received event network-changed-26754f76-ea30-48d7-abea-700c8e50e476 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 753.494682] env[68282]: DEBUG nova.compute.manager [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Refreshing instance network info cache due to event network-changed-26754f76-ea30-48d7-abea-700c8e50e476. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 753.494902] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] Acquiring lock "refresh_cache-42977331-21c5-4169-889f-37dfbb10b6ef" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.495159] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] Acquired lock "refresh_cache-42977331-21c5-4169-889f-37dfbb10b6ef" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.495639] env[68282]: DEBUG nova.network.neutron [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Refreshing network info cache for port 26754f76-ea30-48d7-abea-700c8e50e476 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 753.989555] env[68282]: DEBUG nova.network.neutron [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Updated VIF entry in instance network info cache for port 26754f76-ea30-48d7-abea-700c8e50e476. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 753.989555] env[68282]: DEBUG nova.network.neutron [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Updating instance_info_cache with network_info: [{"id": "26754f76-ea30-48d7-abea-700c8e50e476", "address": "fa:16:3e:9c:d8:93", "network": {"id": "52f212dd-7298-4856-9651-c4c0766b686e", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-120485675-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd9628e9488840a98ad37af2dfd7146f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73e099e8-2acc-4628-a60d-0b4afa46b39d", "external-id": "nsx-vlan-transportzone-767", "segmentation_id": 767, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26754f76-ea", "ovs_interfaceid": "26754f76-ea30-48d7-abea-700c8e50e476", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.007132] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc8f6898-8503-4fcc-9ba8-2f7326d6284f req-7c841c9f-fe97-40c6-9c07-aec4ccad7686 service nova] Releasing lock "refresh_cache-42977331-21c5-4169-889f-37dfbb10b6ef" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.270503] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7d7283b4-bd63-45a4-8e7b-0e5ec8360a96 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Acquiring lock "ac165200-e27f-4d58-83db-419b21a80862" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.271013] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7d7283b4-bd63-45a4-8e7b-0e5ec8360a96 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Lock "ac165200-e27f-4d58-83db-419b21a80862" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.353861] env[68282]: DEBUG oslo_concurrency.lockutils [None req-67e5bd5e-11af-418f-b320-9cc2a7d36fac tempest-ServerActionsTestOtherA-1546743166 tempest-ServerActionsTestOtherA-1546743166-project-member] Acquiring lock "3289ea9a-950e-4baa-8423-c00250207ef9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.354206] env[68282]: DEBUG oslo_concurrency.lockutils [None req-67e5bd5e-11af-418f-b320-9cc2a7d36fac tempest-ServerActionsTestOtherA-1546743166 
tempest-ServerActionsTestOtherA-1546743166-project-member] Lock "3289ea9a-950e-4baa-8423-c00250207ef9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.335024] env[68282]: DEBUG oslo_concurrency.lockutils [None req-078984b5-d61f-412f-8322-3e6e29dd5f3e tempest-ServerMetadataNegativeTestJSON-1697762306 tempest-ServerMetadataNegativeTestJSON-1697762306-project-member] Acquiring lock "3bc646fe-4a97-4fff-a5ae-54b62a292c21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.335310] env[68282]: DEBUG oslo_concurrency.lockutils [None req-078984b5-d61f-412f-8322-3e6e29dd5f3e tempest-ServerMetadataNegativeTestJSON-1697762306 tempest-ServerMetadataNegativeTestJSON-1697762306-project-member] Lock "3bc646fe-4a97-4fff-a5ae-54b62a292c21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.513484] env[68282]: DEBUG oslo_concurrency.lockutils [None req-532c7884-863e-439f-97ec-f5cb89a902ac tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Acquiring lock "fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.514176] env[68282]: DEBUG oslo_concurrency.lockutils [None req-532c7884-863e-439f-97ec-f5cb89a902ac tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Lock "fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.495728] env[68282]: DEBUG oslo_concurrency.lockutils [None req-11e79fb4-3c3b-4678-9ae5-1fef2a754b41 tempest-ServersTestBootFromVolume-335009676 tempest-ServersTestBootFromVolume-335009676-project-member] Acquiring lock "9ab68d2f-1a57-465d-8f18-bb3a81946499" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.495967] env[68282]: DEBUG oslo_concurrency.lockutils [None req-11e79fb4-3c3b-4678-9ae5-1fef2a754b41 tempest-ServersTestBootFromVolume-335009676 tempest-ServersTestBootFromVolume-335009676-project-member] Lock "9ab68d2f-1a57-465d-8f18-bb3a81946499" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.519333] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.551278] env[68282]: DEBUG 
oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.551510] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.551681] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.565818] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.566105] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.566339] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.566533] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 770.567688] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335c5efe-1595-4f8e-84ee-b254330c33c0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.577688] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7d5f8e-a9a6-4490-8097-cadaa8424742 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.592633] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5797fd8-22e5-4690-9b65-662f3f5a6a12 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.599479] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6b31cd-7ee6-4e7d-af46-279724368bb8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.633132] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180923MB free_disk=94GB free_vcpus=48 
pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 770.633313] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.633524] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 13b2d77c-448b-4558-b5ef-005064806213 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9cda9e61-a903-4156-b797-121d7142c021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 167d0f09-4566-46f2-ab98-2acbc5810ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3653a48c-6da3-488a-9b7c-b722032e71ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9714bdd8-86ed-47eb-b703-efffe592aaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c2cb0b72-896b-46c6-bb41-90cded35468b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25eddb82-c2b3-499f-afe0-5141b4624342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 650fcdae-dc95-4191-9696-3b6f004bdb62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.743169] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 42977331-21c5-4169-889f-37dfbb10b6ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.770459] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.802424] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.816302] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.828650] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 81731456-9c86-4e6f-ae95-7b7455f322d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.839388] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 85d9f1dc-0d1e-44e0-92cc-7d7511acb786 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.853341] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9170a530-958f-46ec-a36c-d19baac1869c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.865114] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a4fed27d-e797-42d1-b738-8f24ebd708ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.881774] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 157e19df-fe8f-4287-9c5a-03eefdf05aa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.892634] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8be63333-3269-4158-8476-a3032a185131 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.904476] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 35669e1a-cb77-42be-9e1d-7300ea872d5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.917595] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.932462] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance ada46260-9977-4009-a4f1-c08f2222b6e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.953595] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance ac165200-e27f-4d58-83db-419b21a80862 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.973349] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3289ea9a-950e-4baa-8423-c00250207ef9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.994298] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3bc646fe-4a97-4fff-a5ae-54b62a292c21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.005165] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.019567] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9ab68d2f-1a57-465d-8f18-bb3a81946499 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.019880] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 771.020082] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 771.503457] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d9a14b-4b7b-4ac3-9e54-700a9db68725 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.512869] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42de43d-a1e8-4295-8945-39aa9343e6c7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.547427] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe80b806-4a80-4fd9-ad71-c379b1ed4961 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.555359] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7406dff-1cc1-42a8-a037-e060e35898e2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.569734] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.580976] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 771.601878] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 771.601878] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.968s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.136901] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 772.138846] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 772.138846] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 772.138846] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 772.165190] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.165190] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.165190] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.165190] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.165190] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.165508] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.165819] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.166093] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.166346] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.166637] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 772.169051] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 772.169051] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 772.169051] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 772.169051] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 772.169051] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 772.169051] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 773.195752] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4e6be5d4-6950-4048-a7ac-6f377ebed448 tempest-ServersNegativeTestJSON-1233658316 tempest-ServersNegativeTestJSON-1233658316-project-member] Acquiring lock "5c6abb06-dac1-4579-8fb1-4ea95a2240d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.195752] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4e6be5d4-6950-4048-a7ac-6f377ebed448 tempest-ServersNegativeTestJSON-1233658316 tempest-ServersNegativeTestJSON-1233658316-project-member] Lock "5c6abb06-dac1-4579-8fb1-4ea95a2240d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.605248] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2cd043e5-e24e-4272-afb6-b12a5ff0e17e tempest-ServerExternalEventsTest-17229807 tempest-ServerExternalEventsTest-17229807-project-member] Acquiring lock "af2dfe54-26e1-46c2-984d-94ce7e65cef1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.605597] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2cd043e5-e24e-4272-afb6-b12a5ff0e17e tempest-ServerExternalEventsTest-17229807 tempest-ServerExternalEventsTest-17229807-project-member] Lock "af2dfe54-26e1-46c2-984d-94ce7e65cef1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.852747] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ad885f7b-b9d3-429a-b082-f8c1c073c95a tempest-ServerDiagnosticsNegativeTest-1976461939 tempest-ServerDiagnosticsNegativeTest-1976461939-project-member] Acquiring lock "c5a6776c-88d0-49d2-8e02-05fd21161b44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.852993] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ad885f7b-b9d3-429a-b082-f8c1c073c95a tempest-ServerDiagnosticsNegativeTest-1976461939 tempest-ServerDiagnosticsNegativeTest-1976461939-project-member] Lock "c5a6776c-88d0-49d2-8e02-05fd21161b44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.753812] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6fa693d6-628d-49f3-9a7f-5f2203067209 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] Acquiring lock "7dd91b4e-e702-4c77-a8a4-3dd7f29b3bcc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.753812] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6fa693d6-628d-49f3-9a7f-5f2203067209 
tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] Lock "7dd91b4e-e702-4c77-a8a4-3dd7f29b3bcc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.679525] env[68282]: WARNING oslo_vmware.rw_handles [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 795.679525] env[68282]: ERROR oslo_vmware.rw_handles [ 795.680022] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 795.681682] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 795.681944] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Copying Virtual Disk [datastore2] vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/42baed49-d986-4dd9-b450-e079647c8610/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 795.682267] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42a12d74-42d5-4114-9069-5b7afc7d19ea {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.690349] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Waiting for the task: (returnval){ [ 795.690349] env[68282]: value = "task-3470460" [ 795.690349] env[68282]: _type = "Task" [ 795.690349] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.699931] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Task: {'id': task-3470460, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.200764] env[68282]: DEBUG oslo_vmware.exceptions [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 796.200861] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.202976] env[68282]: ERROR nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 796.202976] env[68282]: Faults: ['InvalidArgument'] [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] Traceback (most recent call last): [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] yield resources [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] self.driver.spawn(context, instance, image_meta, [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] self._vmops.spawn(context, instance, image_meta, injected_files, [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] 
self._fetch_image_if_missing(context, vi) [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] image_cache(vi, tmp_image_ds_loc) [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] vm_util.copy_virtual_disk( [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] session._wait_for_task(vmdk_copy_task) [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] return self.wait_for_task(task_ref) [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] return evt.wait() [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] result = hub.switch() [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] return self.greenlet.switch() [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] self.f(*self.args, **self.kw) [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] raise exceptions.translate_fault(task_info.error) [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] Faults: ['InvalidArgument'] [ 796.202976] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] [ 796.202976] env[68282]: INFO nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa 
tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Terminating instance [ 796.203799] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.203799] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 796.203799] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d363ff9-c2b0-4bd0-a2c9-2251050fad6d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.206141] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquiring lock "refresh_cache-13b2d77c-448b-4558-b5ef-005064806213" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.206308] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquired lock "refresh_cache-13b2d77c-448b-4558-b5ef-005064806213" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.206474] env[68282]: DEBUG nova.network.neutron [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 796.212944] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 796.213139] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 796.213849] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4af6e4c1-6a96-49fa-9209-22d9abd93788 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.220989] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Waiting for the task: (returnval){ [ 796.220989] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]529befcb-2acf-58db-241f-83291b2b4d04" [ 796.220989] env[68282]: _type = "Task" [ 796.220989] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.230698] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]529befcb-2acf-58db-241f-83291b2b4d04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.239827] env[68282]: DEBUG nova.network.neutron [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 796.331647] env[68282]: DEBUG nova.network.neutron [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.341249] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Releasing lock "refresh_cache-13b2d77c-448b-4558-b5ef-005064806213" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.341512] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 796.341704] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 796.343257] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dad463-fe6a-4182-be74-1bf443fc2f5f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.351210] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 796.351420] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ab43c15-2dcf-430e-b2fe-e6d4579b58d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.380291] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 796.380582] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 796.380858] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Deleting the datastore file [datastore2] 13b2d77c-448b-4558-b5ef-005064806213 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.381140] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ab62ef1-a467-455f-aaf5-459a12eab64d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.388241] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Waiting for the task: (returnval){ [ 796.388241] env[68282]: value = "task-3470462" [ 796.388241] env[68282]: _type = "Task" [ 796.388241] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.395421] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Task: {'id': task-3470462, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.730924] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 796.731228] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Creating directory with path [datastore2] vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 796.731459] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06fcbd09-0b6b-4404-bd38-49b3008fc756 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.742630] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Created directory with path [datastore2] vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 796.742853] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Fetch image to [datastore2] vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 796.743051] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 796.743875] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f963d80-2175-41cb-9f7b-5a9deb343bef {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.750621] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742a9a28-6645-4aaf-abdd-28415e931224 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.759965] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54bac77-1c47-46e2-b861-164dd03140fb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.792047] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd83395-b6df-4ede-9a15-1d3c0b4195e1 
{{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.797890] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-15f62a1d-c9b6-4b7f-8667-16a8bbe83712 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.825159] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 796.878387] env[68282]: DEBUG oslo_vmware.rw_handles [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 796.941281] env[68282]: DEBUG oslo_vmware.rw_handles [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 796.941479] env[68282]: DEBUG oslo_vmware.rw_handles [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 796.945552] env[68282]: DEBUG oslo_vmware.api [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Task: {'id': task-3470462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030932} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.945841] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 796.946058] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 796.946258] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 796.946441] env[68282]: INFO nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Took 0.60 seconds to destroy the instance on the hypervisor. [ 796.946713] env[68282]: DEBUG oslo.service.loopingcall [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.946931] env[68282]: DEBUG nova.compute.manager [-] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 796.951026] env[68282]: DEBUG nova.compute.claims [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 796.951203] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.951498] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.335833] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ebb486-2f5a-4985-ba71-f30615e9dd33 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.343020] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c579dfe9-5437-40cf-bb05-d73d86dd0c65 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.373485] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd139751-3281-4706-8687-be8d737e3043 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.380544] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb12049-a66a-4a6e-a2d0-3f6cdf81622e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.393373] env[68282]: DEBUG nova.compute.provider_tree [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.402521] env[68282]: DEBUG nova.scheduler.client.report [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 797.416456] env[68282]: DEBUG 
oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.465s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.417011] env[68282]: ERROR nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 797.417011] env[68282]: Faults: ['InvalidArgument'] [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] Traceback (most recent call last): [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] self.driver.spawn(context, instance, image_meta, [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] self._vmops.spawn(context, instance, image_meta, injected_files, [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] self._fetch_image_if_missing(context, vi) [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] image_cache(vi, tmp_image_ds_loc) [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] vm_util.copy_virtual_disk( [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] session._wait_for_task(vmdk_copy_task) [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] return self.wait_for_task(task_ref) [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 
13b2d77c-448b-4558-b5ef-005064806213] return evt.wait() [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] result = hub.switch() [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] return self.greenlet.switch() [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] self.f(*self.args, **self.kw) [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] raise exceptions.translate_fault(task_info.error) [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] Faults: ['InvalidArgument'] [ 797.417011] env[68282]: ERROR nova.compute.manager [instance: 13b2d77c-448b-4558-b5ef-005064806213] [ 797.417769] env[68282]: DEBUG nova.compute.utils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 797.419162] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Build of instance 13b2d77c-448b-4558-b5ef-005064806213 was re-scheduled: A specified parameter was not correct: fileType [ 797.419162] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 797.419533] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 797.419758] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquiring lock "refresh_cache-13b2d77c-448b-4558-b5ef-005064806213" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.419907] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Acquired lock "refresh_cache-13b2d77c-448b-4558-b5ef-005064806213" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.420083] env[68282]: DEBUG nova.network.neutron [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 797.446612] env[68282]: DEBUG nova.network.neutron [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 797.510338] env[68282]: DEBUG nova.network.neutron [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.520954] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Releasing lock "refresh_cache-13b2d77c-448b-4558-b5ef-005064806213" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.521253] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 797.521453] env[68282]: DEBUG nova.compute.manager [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] [instance: 13b2d77c-448b-4558-b5ef-005064806213] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 797.626712] env[68282]: INFO nova.scheduler.client.report [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Deleted allocations for instance 13b2d77c-448b-4558-b5ef-005064806213 [ 797.649184] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc050a5c-293e-4fe2-b976-02d2c32efeaa tempest-ServerDiagnosticsV248Test-241301814 tempest-ServerDiagnosticsV248Test-241301814-project-member] Lock "13b2d77c-448b-4558-b5ef-005064806213" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.731s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.661436] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 797.707339] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.707645] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.709108] env[68282]: INFO nova.compute.claims [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.102012] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd950d39-0a02-440f-97bc-1b16ea475332 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.110030] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9ffb2f-19de-4935-9781-f1beddf03d07 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.139367] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68df9925-8a4f-4a99-8fbe-5906dee7fdc4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.146952] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01888bd5-8ec0-4828-bbc8-8b926c3b58ba {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.161562] env[68282]: DEBUG nova.compute.provider_tree [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 
tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.171023] env[68282]: DEBUG nova.scheduler.client.report [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 798.184382] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.477s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.184938] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 798.222032] env[68282]: DEBUG nova.compute.utils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.222944] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 798.223138] env[68282]: DEBUG nova.network.neutron [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 798.233058] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Start building block device mappings for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 798.284045] env[68282]: DEBUG nova.policy [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeff02086d114be7816a6d2558c9c8fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea2948d9c0a046a09077c014de41faeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 798.299038] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 798.325043] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 798.325262] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 798.325425] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.325606] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 798.325755] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.325905] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 798.326274] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 798.326274] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 798.326487] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 798.326678] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 798.326851] env[68282]: DEBUG nova.virt.hardware [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 798.327709] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c341c7e1-a7d3-4ae3-9bfd-f0160c7461e9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.335873] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf420c7e-d34f-4beb-94c9-2510add5d5ac {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.587842] env[68282]: DEBUG nova.network.neutron [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Successfully created port: 860f19c9-383e-43c5-8648-c5200f416f8c {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.212535] env[68282]: DEBUG nova.network.neutron [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Successfully updated port: 860f19c9-383e-43c5-8648-c5200f416f8c {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.229939] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "refresh_cache-2b30ba17-99e9-44bc-bd78-73fe5d6cab05" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.231503] env[68282]: DEBUG 
oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "refresh_cache-2b30ba17-99e9-44bc-bd78-73fe5d6cab05" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.231503] env[68282]: DEBUG nova.network.neutron [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 799.277090] env[68282]: DEBUG nova.network.neutron [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 799.459888] env[68282]: DEBUG nova.network.neutron [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Updating instance_info_cache with network_info: [{"id": "860f19c9-383e-43c5-8648-c5200f416f8c", "address": "fa:16:3e:9b:38:09", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap860f19c9-38", "ovs_interfaceid": "860f19c9-383e-43c5-8648-c5200f416f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.474846] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "refresh_cache-2b30ba17-99e9-44bc-bd78-73fe5d6cab05" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.474846] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Instance network_info: |[{"id": "860f19c9-383e-43c5-8648-c5200f416f8c", "address": "fa:16:3e:9b:38:09", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap860f19c9-38", "ovs_interfaceid": "860f19c9-383e-43c5-8648-c5200f416f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 799.474846] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:38:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '860f19c9-383e-43c5-8648-c5200f416f8c', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.482773] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating folder: Project (ea2948d9c0a046a09077c014de41faeb). Parent ref: group-v693573. 
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 799.484235] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ccf275a-eaa7-4bba-9ada-7b736706c461 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.487846] env[68282]: DEBUG nova.compute.manager [req-8bed09b2-0345-4b3c-b635-8b414348f4bb req-96babcbb-7643-45f8-aa38-97f068020ef4 service nova] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Received event network-vif-plugged-860f19c9-383e-43c5-8648-c5200f416f8c {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 799.488118] env[68282]: DEBUG oslo_concurrency.lockutils [req-8bed09b2-0345-4b3c-b635-8b414348f4bb req-96babcbb-7643-45f8-aa38-97f068020ef4 service nova] Acquiring lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.488617] env[68282]: DEBUG oslo_concurrency.lockutils [req-8bed09b2-0345-4b3c-b635-8b414348f4bb req-96babcbb-7643-45f8-aa38-97f068020ef4 service nova] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.488617] env[68282]: DEBUG oslo_concurrency.lockutils [req-8bed09b2-0345-4b3c-b635-8b414348f4bb req-96babcbb-7643-45f8-aa38-97f068020ef4 service nova] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.488788] env[68282]: DEBUG nova.compute.manager [req-8bed09b2-0345-4b3c-b635-8b414348f4bb req-96babcbb-7643-45f8-aa38-97f068020ef4 service nova] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] No waiting events found dispatching network-vif-plugged-860f19c9-383e-43c5-8648-c5200f416f8c {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 799.488984] env[68282]: WARNING nova.compute.manager [req-8bed09b2-0345-4b3c-b635-8b414348f4bb req-96babcbb-7643-45f8-aa38-97f068020ef4 service nova] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Received unexpected event network-vif-plugged-860f19c9-383e-43c5-8648-c5200f416f8c for instance with vm_state building and task_state spawning. [ 799.498665] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created folder: Project (ea2948d9c0a046a09077c014de41faeb) in parent group-v693573. [ 799.498858] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating folder: Instances. Parent ref: group-v693609. 
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 799.499102] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50e0e6bd-e64a-4f57-9858-8f25c0ce3cbe {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.508609] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created folder: Instances in parent group-v693609. [ 799.508722] env[68282]: DEBUG oslo.service.loopingcall [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 799.508898] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 799.509108] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13ed192d-d45f-436d-a9bb-74dfcec4d039 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.528599] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.528599] env[68282]: value = "task-3470465" [ 799.528599] env[68282]: _type = "Task" [ 799.528599] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.537483] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470465, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.042022] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470465, 'name': CreateVM_Task, 'duration_secs': 0.422126} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.042022] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 800.042022] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.042022] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.042022] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 800.042022] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdd0e8ee-3e84-4ef4-a177-fda0e5ef84cf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.045356] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 800.045356] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52b0736a-91fe-920d-e5d5-a4fbd62e74b9" [ 800.045356] env[68282]: _type = "Task" [ 800.045356] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.058457] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52b0736a-91fe-920d-e5d5-a4fbd62e74b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.555947] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.556364] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.556431] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.171239] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.171538] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.584540] env[68282]: DEBUG nova.compute.manager [req-43e48a5f-078a-435d-9be9-47856822ad5b req-4940dd4f-afce-4b82-bbd2-e269ca26baae service nova] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Received event network-changed-860f19c9-383e-43c5-8648-c5200f416f8c {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 801.584953] env[68282]: DEBUG nova.compute.manager [req-43e48a5f-078a-435d-9be9-47856822ad5b req-4940dd4f-afce-4b82-bbd2-e269ca26baae service nova] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Refreshing instance network info cache due to event network-changed-860f19c9-383e-43c5-8648-c5200f416f8c. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 801.584953] env[68282]: DEBUG oslo_concurrency.lockutils [req-43e48a5f-078a-435d-9be9-47856822ad5b req-4940dd4f-afce-4b82-bbd2-e269ca26baae service nova] Acquiring lock "refresh_cache-2b30ba17-99e9-44bc-bd78-73fe5d6cab05" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.585175] env[68282]: DEBUG oslo_concurrency.lockutils [req-43e48a5f-078a-435d-9be9-47856822ad5b req-4940dd4f-afce-4b82-bbd2-e269ca26baae service nova] Acquired lock "refresh_cache-2b30ba17-99e9-44bc-bd78-73fe5d6cab05" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.585231] env[68282]: DEBUG nova.network.neutron [req-43e48a5f-078a-435d-9be9-47856822ad5b req-4940dd4f-afce-4b82-bbd2-e269ca26baae service nova] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Refreshing network info cache for port 860f19c9-383e-43c5-8648-c5200f416f8c {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 801.854436] env[68282]: DEBUG nova.network.neutron [req-43e48a5f-078a-435d-9be9-47856822ad5b req-4940dd4f-afce-4b82-bbd2-e269ca26baae service nova] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Updated VIF entry in instance network info cache for port 860f19c9-383e-43c5-8648-c5200f416f8c. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 801.854798] env[68282]: DEBUG nova.network.neutron [req-43e48a5f-078a-435d-9be9-47856822ad5b req-4940dd4f-afce-4b82-bbd2-e269ca26baae service nova] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Updating instance_info_cache with network_info: [{"id": "860f19c9-383e-43c5-8648-c5200f416f8c", "address": "fa:16:3e:9b:38:09", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap860f19c9-38", "ovs_interfaceid": "860f19c9-383e-43c5-8648-c5200f416f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.867742] env[68282]: DEBUG oslo_concurrency.lockutils [req-43e48a5f-078a-435d-9be9-47856822ad5b req-4940dd4f-afce-4b82-bbd2-e269ca26baae service nova] Releasing lock "refresh_cache-2b30ba17-99e9-44bc-bd78-73fe5d6cab05" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.087129] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.099903] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.100239] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.100441] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.100606] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 831.101859] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e975eff9-9482-4d91-ac7d-11d4bb7e8161 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.110698] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89aea19-7b5c-4978-9c09-1607cb8681f6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.125352] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c853610c-a9d8-45c7-8a52-f8c74be2c84f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.133059] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab762e3b-fae1-46cc-b1a0-112916c1bf39 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.168313] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180946MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 831.168483] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.168697] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.245368] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9cda9e61-a903-4156-b797-121d7142c021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.245538] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 167d0f09-4566-46f2-ab98-2acbc5810ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.245666] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3653a48c-6da3-488a-9b7c-b722032e71ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.245791] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9714bdd8-86ed-47eb-b703-efffe592aaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.245916] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c2cb0b72-896b-46c6-bb41-90cded35468b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.246047] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25eddb82-c2b3-499f-afe0-5141b4624342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.246175] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.246293] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 650fcdae-dc95-4191-9696-3b6f004bdb62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.246409] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 42977331-21c5-4169-889f-37dfbb10b6ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.246521] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 831.258784] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.270263] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.281070] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 81731456-9c86-4e6f-ae95-7b7455f322d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.292213] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 85d9f1dc-0d1e-44e0-92cc-7d7511acb786 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.303532] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9170a530-958f-46ec-a36c-d19baac1869c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.313721] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a4fed27d-e797-42d1-b738-8f24ebd708ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.324195] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 157e19df-fe8f-4287-9c5a-03eefdf05aa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.337954] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8be63333-3269-4158-8476-a3032a185131 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.349360] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 35669e1a-cb77-42be-9e1d-7300ea872d5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.361258] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.373250] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance ada46260-9977-4009-a4f1-c08f2222b6e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.392179] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance ac165200-e27f-4d58-83db-419b21a80862 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.403490] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3289ea9a-950e-4baa-8423-c00250207ef9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.416028] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3bc646fe-4a97-4fff-a5ae-54b62a292c21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.425041] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.435068] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9ab68d2f-1a57-465d-8f18-bb3a81946499 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.444731] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c6abb06-dac1-4579-8fb1-4ea95a2240d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.455534] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance af2dfe54-26e1-46c2-984d-94ce7e65cef1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.465256] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c5a6776c-88d0-49d2-8e02-05fd21161b44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.474809] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7dd91b4e-e702-4c77-a8a4-3dd7f29b3bcc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.484296] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 831.484688] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 831.484875] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 831.815053] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa56194d-eb90-4063-824f-687252521c5f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.822624] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a55465-99f3-4c6f-b7d9-fd1e7837c2d4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.851775] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eeafc13-fb8d-42f4-8c16-bcd91f341053 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.858927] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b22aea7-434f-4384-a701-5c164154da64 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.871768] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.880485] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 831.894996] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 831.895089] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.726s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.891315] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.891626] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.891733] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.891894] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.087696] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.087696] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 833.087696] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 833.107648] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.107814] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.107950] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.108092] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.108222] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.108349] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.108472] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.108593] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.108753] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.108920] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 833.109056] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 833.109554] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.109726] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.109912] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 833.110009] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 843.239233] env[68282]: WARNING oslo_vmware.rw_handles [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 843.239233] env[68282]: ERROR oslo_vmware.rw_handles [ 843.239856] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 843.241823] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Caching image {{(pid=68282) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 843.242135] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Copying Virtual Disk [datastore2] vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/0b2de42c-d10d-42ab-8417-9a8084347481/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 843.242534] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8918453-fc21-4b2f-9b52-7ae17743b9c4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.250645] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Waiting for the task: (returnval){ [ 843.250645] env[68282]: value = "task-3470466" [ 843.250645] env[68282]: _type = "Task" [ 843.250645] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.259716] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Task: {'id': task-3470466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.760602] env[68282]: DEBUG oslo_vmware.exceptions [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 843.760918] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.761523] env[68282]: ERROR nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 843.761523] env[68282]: Faults: ['InvalidArgument'] [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Traceback (most recent call last): [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] yield resources [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] self.driver.spawn(context, instance, image_meta, [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] self._fetch_image_if_missing(context, vi) [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] image_cache(vi, tmp_image_ds_loc) [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] vm_util.copy_virtual_disk( [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] session._wait_for_task(vmdk_copy_task) [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] return self.wait_for_task(task_ref) [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] return evt.wait() [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] result = hub.switch() [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] return self.greenlet.switch() [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] self.f(*self.args, **self.kw) [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] raise exceptions.translate_fault(task_info.error) [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Faults: ['InvalidArgument'] [ 843.761523] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] [ 843.762587] env[68282]: INFO nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Terminating instance [ 843.763419] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.763621] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.766031] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5231c641-40ad-4043-83e8-914c7dc5feec {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.766183] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 843.766377] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.767102] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b79a7cf-e912-4bf6-9d73-567b5654f7ac {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.773887] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 843.774140] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f0fd4c4-2c8e-42c1-bdd4-50c2d080ca78 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.776282] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.776458] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 843.777400] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa5e8ede-d732-46c0-a685-a7ab79390bef {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.782116] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Waiting for the task: (returnval){ [ 843.782116] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52663bd4-fb71-aecb-52bd-faf6158c34eb" [ 843.782116] env[68282]: _type = "Task" [ 843.782116] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.788924] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52663bd4-fb71-aecb-52bd-faf6158c34eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.836531] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 843.836726] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 843.836909] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Deleting the datastore file [datastore2] 167d0f09-4566-46f2-ab98-2acbc5810ce4 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.837197] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe94795f-31ff-4770-a11e-fe6e5fded6e4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.843305] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Waiting for the task: (returnval){ [ 843.843305] env[68282]: value = "task-3470468" [ 843.843305] env[68282]: _type = "Task" [ 843.843305] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.850650] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Task: {'id': task-3470468, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.295261] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 844.295261] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Creating directory with path [datastore2] vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 844.295261] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee3bea4e-792f-48a1-937f-083e5c37abca {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.305215] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Created directory with path [datastore2] vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 844.305479] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Fetch image to [datastore2] vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 844.305642] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 844.306395] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b4633c-9508-440b-8a10-19884852f3e4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.313486] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32abd381-1614-4b50-acaf-95345e832a7f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.322715] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519bb43e-390c-443a-bfa1-5a55b0c0cf4e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.357018] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59462948-d97f-4d9c-b75b-0a09848e5df1 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.365238] env[68282]: DEBUG oslo_vmware.api [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Task: {'id': task-3470468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072063} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.366838] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.367055] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 844.367235] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 844.367420] env[68282]: INFO nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Took 0.60 seconds to destroy the instance on the hypervisor. 
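The DeleteDatastoreFile_Task entries above follow the usual oslo.vmware pattern: the API call returns a task reference and the session polls it ("progress is 0%" ... "completed successfully") until it reaches a terminal state. A minimal, self-contained sketch of that poll loop is below; wait_for_task_sketch, poll_fn and the returned dict keys are illustrative stand-ins, not the oslo.vmware API itself.

import time

def wait_for_task_sketch(poll_fn, interval=0.5, timeout=60.0):
    # poll_fn is assumed to return something like
    # {'state': 'running'|'success'|'error', 'progress': int, 'error': str}
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(interval)  # the equivalent of the periodic _poll_task tick
    raise TimeoutError('task did not complete within %.0fs' % timeout)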
[ 844.369994] env[68282]: DEBUG nova.compute.claims [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 844.369994] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.370100] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.372705] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b1487b9a-8570-41e3-af49-b9da554c3445 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.396138] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 844.466194] env[68282]: DEBUG oslo_vmware.rw_handles [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 844.529645] env[68282]: DEBUG oslo_vmware.rw_handles [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 844.529860] env[68282]: DEBUG oslo_vmware.rw_handles [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 844.832765] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb3e677-3c24-4dec-a945-acf6c6c8dba5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.841069] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352e4b6f-21a1-4a69-acf3-99c034ecfce9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.872133] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793bc2ee-fe41-4ef6-b236-446666f23949 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.880022] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efb6901-23bb-4f1e-8c1b-b42dfe240ebf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.893933] env[68282]: DEBUG nova.compute.provider_tree [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.905010] env[68282]: DEBUG nova.scheduler.client.report [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 844.918407] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.548s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.919192] env[68282]: ERROR nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 844.919192] env[68282]: Faults: ['InvalidArgument'] [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Traceback (most recent call last): [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 844.919192] env[68282]: ERROR 
nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] self.driver.spawn(context, instance, image_meta, [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] self._fetch_image_if_missing(context, vi) [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] image_cache(vi, tmp_image_ds_loc) [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] vm_util.copy_virtual_disk( [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] session._wait_for_task(vmdk_copy_task) [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] return self.wait_for_task(task_ref) [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] return evt.wait() [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] result = hub.switch() [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] return self.greenlet.switch() [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] self.f(*self.args, **self.kw) [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] raise exceptions.translate_fault(task_info.error) [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Faults: ['InvalidArgument'] [ 844.919192] env[68282]: ERROR nova.compute.manager [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] [ 844.920394] env[68282]: DEBUG nova.compute.utils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 844.921874] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Build of instance 167d0f09-4566-46f2-ab98-2acbc5810ce4 was re-scheduled: A specified parameter was not correct: fileType [ 844.921874] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 844.922454] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 844.922687] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 844.922908] env[68282]: DEBUG nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 844.923145] env[68282]: DEBUG nova.network.neutron [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 845.248933] env[68282]: DEBUG nova.network.neutron [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.260421] env[68282]: INFO nova.compute.manager [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] [instance: 167d0f09-4566-46f2-ab98-2acbc5810ce4] Took 0.34 seconds to deallocate network for instance. [ 845.371182] env[68282]: INFO nova.scheduler.client.report [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Deleted allocations for instance 167d0f09-4566-46f2-ab98-2acbc5810ce4 [ 845.394386] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5079c402-177f-4cba-8f67-df1bf7a0bf96 tempest-ImagesOneServerTestJSON-884144857 tempest-ImagesOneServerTestJSON-884144857-project-member] Lock "167d0f09-4566-46f2-ab98-2acbc5810ce4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.177s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.408558] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Starting instance... 
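The traceback above shows the fault path: the task poller translates the vCenter error into oslo_vmware.exceptions.VimFaultException ("A specified parameter was not correct: fileType", faults ['InvalidArgument']), which propagates out of wait_for_task, aborts the resource claim, and causes the build to be re-scheduled and its network deallocated. A hedged sketch of catching that exception around a disk-copy helper follows; copy_disk() is a hypothetical callable, and the fault_list attribute is used as I understand the oslo.vmware exception class.

from oslo_vmware import exceptions as vexc

def cache_image_sketch(copy_disk):
    # copy_disk() stands in for CopyVirtualDisk_Task + wait_for_task.
    try:
        return copy_disk()
    except vexc.VimFaultException as exc:
        # e.g. 'A specified parameter was not correct: fileType',
        # fault_list == ['InvalidArgument'] as in the log above.
        print('disk copy failed: %s (faults: %s)' % (exc, exc.fault_list))
        raise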
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 845.456544] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.456807] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.458301] env[68282]: INFO nova.compute.claims [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.869842] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618430cf-8ddd-4d82-af75-33601e6c6121 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.877581] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4233af-f54e-463a-b0a6-09c480567cf7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.909995] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d779d0be-368b-4a5c-a163-b99526c1659f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.917289] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c63ab8-dc58-4909-bde8-d077de9858d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.932944] env[68282]: DEBUG nova.compute.provider_tree [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.944463] env[68282]: DEBUG nova.scheduler.client.report [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 845.960754] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.504s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.961375] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 845.996061] env[68282]: DEBUG nova.compute.utils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.997510] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 845.997682] env[68282]: DEBUG nova.network.neutron [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 846.008310] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 846.064361] env[68282]: DEBUG nova.policy [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16e5c0a7530e499ba7f30cb714f251dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1021604806f4a47b9e3d1c5f3691315', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 846.076532] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Start spawning the instance on the hypervisor. 
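The Acquiring lock "compute_resources" / acquired / released triplets above come from oslo.concurrency serializing the resource tracker's claim paths (instance_claim, abort_instance_claim). A minimal sketch of that pattern with lockutils.synchronized is below; the function body and its arguments are illustrative only.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim_sketch(tracked, instance_uuid, resources):
    # Runs with the 'compute_resources' semaphore held, so concurrent
    # claims and aborts cannot interleave their inventory updates.
    tracked[instance_uuid] = resources
    return resources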
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 846.103220] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.103572] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.103666] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.103852] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.104155] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.104355] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.104584] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.104745] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.104914] env[68282]: DEBUG nova.virt.hardware [None 
req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.105239] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.105271] env[68282]: DEBUG nova.virt.hardware [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.106160] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4f101c-6d4f-430f-8543-e0b652186b36 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.114167] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e327db-9327-4624-9e17-0151e1720742 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.370063] env[68282]: DEBUG nova.network.neutron [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Successfully created port: 995211b2-815a-497c-acb6-5faf58882300 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.383091] env[68282]: DEBUG nova.network.neutron [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Successfully updated port: 995211b2-815a-497c-acb6-5faf58882300 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 847.396804] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquiring lock "refresh_cache-b081435b-64e1-4baa-a634-a2f22a3d9a29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.396956] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquired lock "refresh_cache-b081435b-64e1-4baa-a634-a2f22a3d9a29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.397166] env[68282]: DEBUG nova.network.neutron [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 847.414622] env[68282]: DEBUG nova.compute.manager [req-9424b448-d968-44a4-8553-f4136bcc2d3e 
req-c44b349d-4a0c-46bf-ac37-1f39de524cf3 service nova] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Received event network-vif-plugged-995211b2-815a-497c-acb6-5faf58882300 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 847.414622] env[68282]: DEBUG oslo_concurrency.lockutils [req-9424b448-d968-44a4-8553-f4136bcc2d3e req-c44b349d-4a0c-46bf-ac37-1f39de524cf3 service nova] Acquiring lock "b081435b-64e1-4baa-a634-a2f22a3d9a29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.414779] env[68282]: DEBUG oslo_concurrency.lockutils [req-9424b448-d968-44a4-8553-f4136bcc2d3e req-c44b349d-4a0c-46bf-ac37-1f39de524cf3 service nova] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.415045] env[68282]: DEBUG oslo_concurrency.lockutils [req-9424b448-d968-44a4-8553-f4136bcc2d3e req-c44b349d-4a0c-46bf-ac37-1f39de524cf3 service nova] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.415161] env[68282]: DEBUG nova.compute.manager [req-9424b448-d968-44a4-8553-f4136bcc2d3e req-c44b349d-4a0c-46bf-ac37-1f39de524cf3 service nova] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] No waiting events found dispatching network-vif-plugged-995211b2-815a-497c-acb6-5faf58882300 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 847.415324] env[68282]: WARNING nova.compute.manager [req-9424b448-d968-44a4-8553-f4136bcc2d3e req-c44b349d-4a0c-46bf-ac37-1f39de524cf3 service nova] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Received unexpected event network-vif-plugged-995211b2-815a-497c-acb6-5faf58882300 for instance with vm_state building and task_state spawning. [ 847.492582] env[68282]: DEBUG nova.network.neutron [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.760375] env[68282]: DEBUG nova.network.neutron [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Updating instance_info_cache with network_info: [{"id": "995211b2-815a-497c-acb6-5faf58882300", "address": "fa:16:3e:8c:44:6c", "network": {"id": "ac970264-5d8b-4aa1-a1ea-1a4ad2b8a21c", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-326028190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1021604806f4a47b9e3d1c5f3691315", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995211b2-81", "ovs_interfaceid": "995211b2-815a-497c-acb6-5faf58882300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.779215] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Releasing lock "refresh_cache-b081435b-64e1-4baa-a634-a2f22a3d9a29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.779530] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Instance network_info: |[{"id": "995211b2-815a-497c-acb6-5faf58882300", "address": "fa:16:3e:8c:44:6c", "network": {"id": "ac970264-5d8b-4aa1-a1ea-1a4ad2b8a21c", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-326028190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1021604806f4a47b9e3d1c5f3691315", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995211b2-81", "ovs_interfaceid": "995211b2-815a-497c-acb6-5faf58882300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 847.779946] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:44:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b71230ae-e879-4384-88ce-fe64c86fce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '995211b2-815a-497c-acb6-5faf58882300', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.791794] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Creating folder: Project (e1021604806f4a47b9e3d1c5f3691315). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 847.793575] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86678e9e-98c6-4be5-ad96-e046f1d102aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.805535] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Created folder: Project (e1021604806f4a47b9e3d1c5f3691315) in parent group-v693573. [ 847.805535] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Creating folder: Instances. Parent ref: group-v693612. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 847.805784] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02d5a1ab-58bc-4374-9d17-4b0380c08469 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.816516] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Created folder: Instances in parent group-v693612. [ 847.816763] env[68282]: DEBUG oslo.service.loopingcall [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.816948] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 847.817165] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9af79787-25d0-4cd8-8c16-fe89da9a2744 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.841076] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.841076] env[68282]: value = "task-3470471" [ 847.841076] env[68282]: _type = "Task" [ 847.841076] env[68282]: } to complete. 
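The "Instance VIF info" entry above is derived from the network_info cache entry logged just before it (port 995211b2-815a-497c-acb6-5faf58882300, MAC fa:16:3e:8c:44:6c, NSX logical switch b71230ae-e879-4384-88ce-fe64c86fce22). A small sketch of that mapping, using the field names exactly as they appear in the log, is below; the helper name itself is illustrative, not Nova's actual function.

def vif_info_from_network_info_sketch(vif):
    details = vif['details']
    return {
        'network_name': vif['network']['bridge'],            # 'br-int'
        'mac_address': vif['address'],                        # 'fa:16:3e:8c:44:6c'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],  # the NSX logical switch above
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],                               # the Neutron port id
        'vif_model': 'vmxnet3',
    }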
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.850438] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470471, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.353605] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470471, 'name': CreateVM_Task, 'duration_secs': 0.277757} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.353891] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 848.354665] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.355264] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.355691] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 848.356033] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeeed8a6-53f3-4c76-b648-9c82a89919f0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.360914] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Waiting for the task: (returnval){ [ 848.360914] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ce6bf7-64ae-ead7-8bcd-8141244bde82" [ 848.360914] env[68282]: _type = "Task" [ 848.360914] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.371965] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ce6bf7-64ae-ead7-8bcd-8141244bde82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.872695] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.872695] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.872695] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.658032] env[68282]: DEBUG nova.compute.manager [req-1bffd2fc-3dc8-4493-a1c8-4b0dcf669755 req-0949cee8-64db-418b-931d-c5d97bcd9a93 service nova] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Received event network-changed-995211b2-815a-497c-acb6-5faf58882300 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 849.658032] env[68282]: DEBUG nova.compute.manager [req-1bffd2fc-3dc8-4493-a1c8-4b0dcf669755 req-0949cee8-64db-418b-931d-c5d97bcd9a93 service nova] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Refreshing instance network info cache due to event network-changed-995211b2-815a-497c-acb6-5faf58882300. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 849.658032] env[68282]: DEBUG oslo_concurrency.lockutils [req-1bffd2fc-3dc8-4493-a1c8-4b0dcf669755 req-0949cee8-64db-418b-931d-c5d97bcd9a93 service nova] Acquiring lock "refresh_cache-b081435b-64e1-4baa-a634-a2f22a3d9a29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.658032] env[68282]: DEBUG oslo_concurrency.lockutils [req-1bffd2fc-3dc8-4493-a1c8-4b0dcf669755 req-0949cee8-64db-418b-931d-c5d97bcd9a93 service nova] Acquired lock "refresh_cache-b081435b-64e1-4baa-a634-a2f22a3d9a29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.658196] env[68282]: DEBUG nova.network.neutron [req-1bffd2fc-3dc8-4493-a1c8-4b0dcf669755 req-0949cee8-64db-418b-931d-c5d97bcd9a93 service nova] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Refreshing network info cache for port 995211b2-815a-497c-acb6-5faf58882300 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 849.925507] env[68282]: DEBUG nova.network.neutron [req-1bffd2fc-3dc8-4493-a1c8-4b0dcf669755 req-0949cee8-64db-418b-931d-c5d97bcd9a93 service nova] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Updated VIF entry in instance network info cache for port 995211b2-815a-497c-acb6-5faf58882300. 
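The lock on "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" plus the SearchDatastore_Task above implement a per-image cache check: only one request populates the cached vmdk at a time, and a cache hit skips the image download entirely. A hedged sketch of that flow is below; the lock object and the two callables are hypothetical stand-ins.

def fetch_image_if_missing_sketch(image_lock, cached_vmdk_exists,
                                  download_and_cache, cache_path):
    with image_lock:                              # per-image serialization, as in the lock messages above
        if not cached_vmdk_exists(cache_path):    # SearchDatastore_Task equivalent
            download_and_cache(cache_path)        # cache miss: fetch the image, copy it into place
    return cache_path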
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 849.925896] env[68282]: DEBUG nova.network.neutron [req-1bffd2fc-3dc8-4493-a1c8-4b0dcf669755 req-0949cee8-64db-418b-931d-c5d97bcd9a93 service nova] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Updating instance_info_cache with network_info: [{"id": "995211b2-815a-497c-acb6-5faf58882300", "address": "fa:16:3e:8c:44:6c", "network": {"id": "ac970264-5d8b-4aa1-a1ea-1a4ad2b8a21c", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-326028190-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1021604806f4a47b9e3d1c5f3691315", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b71230ae-e879-4384-88ce-fe64c86fce22", "external-id": "nsx-vlan-transportzone-473", "segmentation_id": 473, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap995211b2-81", "ovs_interfaceid": "995211b2-815a-497c-acb6-5faf58882300", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.935281] env[68282]: DEBUG oslo_concurrency.lockutils [req-1bffd2fc-3dc8-4493-a1c8-4b0dcf669755 req-0949cee8-64db-418b-931d-c5d97bcd9a93 service nova] Releasing lock "refresh_cache-b081435b-64e1-4baa-a634-a2f22a3d9a29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.283350] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquiring lock "66243637-f1f4-4c60-b12a-bbe30c423630" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.284047] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "66243637-f1f4-4c60-b12a-bbe30c423630" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.086598] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 891.098516] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.098751] env[68282]: 
DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.098921] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.099095] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 891.100266] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba404d25-04e1-469b-a709-2da3e8362007 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.108658] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0387b0-3b69-469a-b989-0f7b2ffe7ec7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.122058] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21aa1279-92ed-45d9-9d79-6d812e9ac262 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.128071] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0860145c-3082-45a0-b4d8-f70baa24f549 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.157855] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180933MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 891.158015] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.158283] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.189098] env[68282]: WARNING oslo_vmware.rw_handles [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call 
last): [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 891.189098] env[68282]: ERROR oslo_vmware.rw_handles [ 891.189487] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 891.191084] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 891.191363] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Copying Virtual Disk [datastore2] vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/69505ae7-2138-4a20-95ea-ac7fc245f8d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 891.191874] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e3ebffd-b9f6-467e-aa74-2094b7b0d9e9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.204126] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Waiting for the task: (returnval){ [ 891.204126] env[68282]: value = "task-3470472" [ 891.204126] env[68282]: _type = "Task" [ 891.204126] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.215039] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Task: {'id': task-3470472, 'name': CopyVirtualDisk_Task} progress is 0%. 
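The WARNING above shows the write handle's close() hitting http.client.RemoteDisconnected while reading the server's response, after which the flow still proceeds to "Downloaded image file data" and the CopyVirtualDisk_Task. A rough sketch of that tolerant close, assuming the handle keeps its upload connection in self._conn as the traceback indicates:

import http.client

class WriteHandleCloseSketch:
    def __init__(self, conn):
        self._conn = conn               # an http.client.HTTPSConnection used for the upload

    def close(self):
        try:
            self._conn.getresponse()    # the same call that raised in the traceback above
        except http.client.RemoteDisconnected as exc:
            # Logged as a warning only; the uploaded tmp-sparse.vmdk is
            # still used for the follow-up virtual disk copy.
            print('Error occurred while reading the HTTP response.: %s' % exc)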
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.233310] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9cda9e61-a903-4156-b797-121d7142c021 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.233467] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3653a48c-6da3-488a-9b7c-b722032e71ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.233594] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9714bdd8-86ed-47eb-b703-efffe592aaf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.233722] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c2cb0b72-896b-46c6-bb41-90cded35468b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.233833] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25eddb82-c2b3-499f-afe0-5141b4624342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.233952] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.234502] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 650fcdae-dc95-4191-9696-3b6f004bdb62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.234502] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 42977331-21c5-4169-889f-37dfbb10b6ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.234502] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.234502] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.245406] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.256277] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 81731456-9c86-4e6f-ae95-7b7455f322d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.266548] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 85d9f1dc-0d1e-44e0-92cc-7d7511acb786 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.277183] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9170a530-958f-46ec-a36c-d19baac1869c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.286679] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a4fed27d-e797-42d1-b738-8f24ebd708ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.297142] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 157e19df-fe8f-4287-9c5a-03eefdf05aa4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.306840] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8be63333-3269-4158-8476-a3032a185131 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.317026] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 35669e1a-cb77-42be-9e1d-7300ea872d5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.326381] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.335732] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance ada46260-9977-4009-a4f1-c08f2222b6e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.345060] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance ac165200-e27f-4d58-83db-419b21a80862 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.355191] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3289ea9a-950e-4baa-8423-c00250207ef9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.365316] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3bc646fe-4a97-4fff-a5ae-54b62a292c21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.377028] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.387172] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9ab68d2f-1a57-465d-8f18-bb3a81946499 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.397304] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c6abb06-dac1-4579-8fb1-4ea95a2240d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.407183] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance af2dfe54-26e1-46c2-984d-94ce7e65cef1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.419432] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c5a6776c-88d0-49d2-8e02-05fd21161b44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.430190] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7dd91b4e-e702-4c77-a8a4-3dd7f29b3bcc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.440599] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.453145] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.453407] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 891.453579] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 891.715511] env[68282]: DEBUG oslo_vmware.exceptions [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 891.717815] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.718385] env[68282]: ERROR nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 891.718385] env[68282]: Faults: ['InvalidArgument'] [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] Traceback (most recent call last): [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] yield resources [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] self.driver.spawn(context, instance, image_meta, [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] self._vmops.spawn(context, instance, image_meta, injected_files, [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] self._fetch_image_if_missing(context, vi) [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] image_cache(vi, tmp_image_ds_loc) [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] vm_util.copy_virtual_disk( [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] session._wait_for_task(vmdk_copy_task) [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] return self.wait_for_task(task_ref) [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] return evt.wait() [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] result = hub.switch() [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] return self.greenlet.switch() [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] self.f(*self.args, **self.kw) [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] raise exceptions.translate_fault(task_info.error) [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] Faults: ['InvalidArgument'] [ 891.718385] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] [ 891.719388] env[68282]: INFO nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Terminating instance [ 891.720280] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.720488] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.721096] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] 
Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 891.721319] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 891.721546] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c076c813-fa40-49cf-980e-8e53ed4ea55a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.723843] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9d6df5-d766-4037-8df8-ae42f2c12ca9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.730543] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 891.732933] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76f8298d-f488-40a2-9121-e4cdc63089d7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.735298] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.735481] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 891.736462] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-178bb9f9-94e7-4574-9940-1a4a58d74b31 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.741150] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for the task: (returnval){ [ 891.741150] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52eb28be-99da-28ef-3950-5771c621302c" [ 891.741150] env[68282]: _type = "Task" [ 891.741150] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.750758] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52eb28be-99da-28ef-3950-5771c621302c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.795363] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 891.795597] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 891.795783] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Deleting the datastore file [datastore2] 9cda9e61-a903-4156-b797-121d7142c021 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.796061] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b8a0964-df61-47ed-b7cd-35ec6856564e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.802183] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Waiting for the task: (returnval){ [ 891.802183] env[68282]: value = "task-3470474" [ 891.802183] env[68282]: _type = "Task" [ 891.802183] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.814622] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Task: {'id': task-3470474, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.827411] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bcd8dc-6a7e-48a6-b474-7c5f3c9724f2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.834080] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b1a8ee-3e5e-4d7d-afec-b97989b31e58 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.864154] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09580bf9-3eee-49f4-9d6c-8bafb00696aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.872035] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2ca982-5e74-4803-841f-f0ed5803f7d1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.886259] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 891.894755] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 891.911417] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 891.911801] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.753s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.251655] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 892.251921] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Creating directory with path [datastore2] vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.252185] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-306ee657-fcf7-470c-8c6e-39810588937c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.263276] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Created directory with path [datastore2] vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.263468] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Fetch image to [datastore2] vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 892.263695] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 892.264453] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bcd33a-809e-4f00-83b2-f00c84b2ae25 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.271055] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db257a2-cd0b-4dfb-9a58-11e0c3deab85 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.279847] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069604e0-dfe2-4b5a-9bb8-19fb35b72e5c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.313466] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee586dd-1750-4d98-b814-df5ae0d060c2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.320505] env[68282]: DEBUG oslo_vmware.api [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Task: {'id': task-3470474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079436} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.322161] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.322233] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 892.322385] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 892.322561] env[68282]: INFO nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Took 0.60 seconds to destroy the instance on the hypervisor. [ 892.324399] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8af0c74e-279c-4217-8ffc-09c39b0602c4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.326396] env[68282]: DEBUG nova.compute.claims [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 892.326594] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.326815] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.350520] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 892.414762] env[68282]: DEBUG oslo_vmware.rw_handles [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Creating HTTP connection to write to file with 
size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 892.480543] env[68282]: DEBUG oslo_vmware.rw_handles [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 892.480718] env[68282]: DEBUG oslo_vmware.rw_handles [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 892.780494] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1641e30-8642-4c88-9b45-e0dd20e30a94 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.788508] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e844d240-a73c-41ab-957e-fc4fa46069d1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.819336] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fe1275-2eed-4547-8a8f-02e42cfa1fc4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.826916] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cfb740-6ff8-42fc-a6d0-2896adc4de4d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.839862] env[68282]: DEBUG nova.compute.provider_tree [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.848454] env[68282]: DEBUG nova.scheduler.client.report [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 892.861985] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 
tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.535s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.862535] env[68282]: ERROR nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 892.862535] env[68282]: Faults: ['InvalidArgument'] [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] Traceback (most recent call last): [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] self.driver.spawn(context, instance, image_meta, [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] self._vmops.spawn(context, instance, image_meta, injected_files, [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] self._fetch_image_if_missing(context, vi) [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] image_cache(vi, tmp_image_ds_loc) [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] vm_util.copy_virtual_disk( [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] session._wait_for_task(vmdk_copy_task) [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] return self.wait_for_task(task_ref) [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] return evt.wait() [ 892.862535] env[68282]: ERROR 
nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] result = hub.switch() [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] return self.greenlet.switch() [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] self.f(*self.args, **self.kw) [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] raise exceptions.translate_fault(task_info.error) [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] Faults: ['InvalidArgument'] [ 892.862535] env[68282]: ERROR nova.compute.manager [instance: 9cda9e61-a903-4156-b797-121d7142c021] [ 892.863394] env[68282]: DEBUG nova.compute.utils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 892.864550] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Build of instance 9cda9e61-a903-4156-b797-121d7142c021 was re-scheduled: A specified parameter was not correct: fileType [ 892.864550] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 892.864963] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 892.865157] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 892.865322] env[68282]: DEBUG nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 892.865487] env[68282]: DEBUG nova.network.neutron [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 892.908207] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.908441] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 892.908599] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.087523] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.087676] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 893.353018] env[68282]: DEBUG nova.network.neutron [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.364531] env[68282]: INFO nova.compute.manager [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: 9cda9e61-a903-4156-b797-121d7142c021] Took 0.50 seconds to deallocate network for instance. 
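The re-schedule sequence recorded above follows the task-polling contract visible in the tracebacks: CopyVirtualDisk_Task is polled by wait_for_task/_poll_task, a failed task is translated (via exceptions.translate_fault) into a VimFaultException carrying Faults: ['InvalidArgument'], and nova.compute.manager reacts by aborting the compute_resources claim, deallocating the network, and re-scheduling the build. The snippet below is a minimal, self-contained sketch of that poll-and-translate loop only; HypotheticalTask, HypotheticalFaultException and the poll interval are illustrative stand-ins, not the actual oslo.vmware or Nova objects.

    import time

    class HypotheticalFaultException(Exception):
        # Illustrative stand-in for oslo_vmware.exceptions.VimFaultException.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    class HypotheticalTask:
        # Illustrative stand-in for a vCenter task handle; cycles queued -> running -> error.
        def __init__(self):
            self._states = iter(["queued", "running", "error"])

        def info(self):
            # A real implementation would fetch TaskInfo from vCenter here.
            return {"state": next(self._states),
                    "progress": 0,
                    "error": {"msg": "A specified parameter was not correct: fileType",
                              "faults": ["InvalidArgument"]}}

    def wait_for_task(task, poll_interval=0.5):
        # Poll until the task succeeds, or raise a fault exception on error,
        # mirroring the wait_for_task/_poll_task loop shown in the tracebacks above.
        while True:
            info = task.info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # translate_fault() performs this TaskInfo.error -> exception mapping
                # in the real stack; here we raise the stand-in directly.
                raise HypotheticalFaultException(info["error"]["faults"],
                                                 info["error"]["msg"])
            time.sleep(poll_interval)

    if __name__ == "__main__":
        try:
            wait_for_task(HypotheticalTask(), poll_interval=0.01)
        except HypotheticalFaultException as exc:
            print("Faults:", exc.fault_list, "-", exc)

In the log itself, that InvalidArgument fault does not abort the request outright: the claim is released, allocations are deleted, and the instance build is re-scheduled, which is why the "9cda9e61-a903-4156-b797-121d7142c021" lock is only released after roughly 192 seconds.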
[ 893.464651] env[68282]: INFO nova.scheduler.client.report [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Deleted allocations for instance 9cda9e61-a903-4156-b797-121d7142c021 [ 893.489831] env[68282]: DEBUG oslo_concurrency.lockutils [None req-86540ad1-2d0b-4725-9cf1-28fb13155522 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Lock "9cda9e61-a903-4156-b797-121d7142c021" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 192.633s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.525215] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 893.576558] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.576827] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.578399] env[68282]: INFO nova.compute.claims [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 893.979133] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a7841f-40fe-4f87-a170-95380b051836 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.987660] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62f82660-1537-45c0-8f86-8223383e87a2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.017011] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de32386f-57db-4472-ae61-0dd23f0619a9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.024819] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92c82e2-d246-4536-bfc8-0500e9a489da {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.037394] env[68282]: DEBUG nova.compute.provider_tree [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd 
tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.047037] env[68282]: DEBUG nova.scheduler.client.report [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 894.066811] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.490s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.067341] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 894.087639] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.087639] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.101597] env[68282]: DEBUG nova.compute.utils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.103044] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Allocating IP information in the background. 
{{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 894.103249] env[68282]: DEBUG nova.network.neutron [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 894.110983] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 894.165527] env[68282]: DEBUG nova.policy [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac57b628ea2c4f1b8dd05a694c85354f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e07f54d0c6e45268b807d63e2df4119', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 894.174331] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 894.203246] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 894.203562] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 894.203768] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 894.203993] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 894.204196] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 894.204392] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 894.204654] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 894.204861] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 894.205084] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 894.205295] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 894.205509] env[68282]: DEBUG nova.virt.hardware [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 894.206411] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c761d83-c728-4acf-ab73-b266d8f2a8fc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.218017] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c8d6eb-aff9-46a5-85df-c841de78999b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.471425] env[68282]: DEBUG nova.network.neutron [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Successfully created port: 3984c671-0bf5-4de6-837b-5bbfc3c3e419 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.083429] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.115676] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.115676] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 895.115676] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 895.123598] env[68282]: DEBUG nova.network.neutron [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Successfully updated port: 3984c671-0bf5-4de6-837b-5bbfc3c3e419 {{(pid=68282) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 895.136491] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquiring lock "refresh_cache-5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.136491] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquired lock "refresh_cache-5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.136491] env[68282]: DEBUG nova.network.neutron [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 895.141389] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.141650] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.141701] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.141822] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.141946] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.142109] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.142200] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.142388] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.142478] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.143819] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 895.143819] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 895.145094] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 895.185720] env[68282]: DEBUG nova.network.neutron [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.296491] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeae6255-1e7d-4706-959f-86410b020ff6 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Acquiring lock "b338f0ef-8361-40de-b45b-309cb87a17e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.296732] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeae6255-1e7d-4706-959f-86410b020ff6 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Lock "b338f0ef-8361-40de-b45b-309cb87a17e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.361858] env[68282]: DEBUG nova.network.neutron [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Updating instance_info_cache with network_info: [{"id": "3984c671-0bf5-4de6-837b-5bbfc3c3e419", "address": "fa:16:3e:5d:06:6c", "network": {"id": "578a9d99-0794-4236-82b8-7c011b5bdb4a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-845548251-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e07f54d0c6e45268b807d63e2df4119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3984c671-0b", "ovs_interfaceid": "3984c671-0bf5-4de6-837b-5bbfc3c3e419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.375933] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Releasing lock "refresh_cache-5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.376242] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Instance network_info: |[{"id": "3984c671-0bf5-4de6-837b-5bbfc3c3e419", "address": "fa:16:3e:5d:06:6c", "network": {"id": "578a9d99-0794-4236-82b8-7c011b5bdb4a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-845548251-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e07f54d0c6e45268b807d63e2df4119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3984c671-0b", "ovs_interfaceid": "3984c671-0bf5-4de6-837b-5bbfc3c3e419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 895.376647] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:06:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e39ca24f-7890-4cdf-8dab-ecab218bb063', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3984c671-0bf5-4de6-837b-5bbfc3c3e419', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.384230] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Creating folder: Project (8e07f54d0c6e45268b807d63e2df4119). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 895.384652] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b5009b4-7bff-49e0-b7d6-76dc9cf17d37 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.395557] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Created folder: Project (8e07f54d0c6e45268b807d63e2df4119) in parent group-v693573. [ 895.395705] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Creating folder: Instances. Parent ref: group-v693615. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 895.395929] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b16ff76-4205-41b7-95df-b88ff7ffc07c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.407997] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Created folder: Instances in parent group-v693615. 
[ 895.407997] env[68282]: DEBUG oslo.service.loopingcall [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.407997] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 895.407997] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3956272f-6216-469f-b41a-d26de724ee9e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.426398] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.426398] env[68282]: value = "task-3470477" [ 895.426398] env[68282]: _type = "Task" [ 895.426398] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.434625] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470477, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.788851] env[68282]: DEBUG nova.compute.manager [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Received event network-vif-plugged-3984c671-0bf5-4de6-837b-5bbfc3c3e419 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 895.789397] env[68282]: DEBUG oslo_concurrency.lockutils [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] Acquiring lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.789632] env[68282]: DEBUG oslo_concurrency.lockutils [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.789805] env[68282]: DEBUG oslo_concurrency.lockutils [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.789980] env[68282]: DEBUG nova.compute.manager [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] No waiting events found dispatching network-vif-plugged-3984c671-0bf5-4de6-837b-5bbfc3c3e419 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 895.790222] env[68282]: WARNING nova.compute.manager [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] 
Received unexpected event network-vif-plugged-3984c671-0bf5-4de6-837b-5bbfc3c3e419 for instance with vm_state building and task_state spawning. [ 895.790333] env[68282]: DEBUG nova.compute.manager [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Received event network-changed-3984c671-0bf5-4de6-837b-5bbfc3c3e419 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 895.790494] env[68282]: DEBUG nova.compute.manager [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Refreshing instance network info cache due to event network-changed-3984c671-0bf5-4de6-837b-5bbfc3c3e419. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 895.790680] env[68282]: DEBUG oslo_concurrency.lockutils [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] Acquiring lock "refresh_cache-5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.790819] env[68282]: DEBUG oslo_concurrency.lockutils [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] Acquired lock "refresh_cache-5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.790978] env[68282]: DEBUG nova.network.neutron [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Refreshing network info cache for port 3984c671-0bf5-4de6-837b-5bbfc3c3e419 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 895.936266] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470477, 'name': CreateVM_Task, 'duration_secs': 0.309101} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.936446] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 895.937123] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.937291] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.937659] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 895.937908] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbafbcc2-58b8-4c5a-9b4d-ac138b5fa7cc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.942571] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Waiting for the task: (returnval){ [ 895.942571] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5274a6be-ab50-3163-4ff8-c9723c56a27e" [ 895.942571] env[68282]: _type = "Task" [ 895.942571] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.954837] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5274a6be-ab50-3163-4ff8-c9723c56a27e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.057856] env[68282]: DEBUG nova.network.neutron [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Updated VIF entry in instance network info cache for port 3984c671-0bf5-4de6-837b-5bbfc3c3e419. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 896.058242] env[68282]: DEBUG nova.network.neutron [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Updating instance_info_cache with network_info: [{"id": "3984c671-0bf5-4de6-837b-5bbfc3c3e419", "address": "fa:16:3e:5d:06:6c", "network": {"id": "578a9d99-0794-4236-82b8-7c011b5bdb4a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-845548251-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e07f54d0c6e45268b807d63e2df4119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e39ca24f-7890-4cdf-8dab-ecab218bb063", "external-id": "nsx-vlan-transportzone-890", "segmentation_id": 890, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3984c671-0b", "ovs_interfaceid": "3984c671-0bf5-4de6-837b-5bbfc3c3e419", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.070483] env[68282]: DEBUG oslo_concurrency.lockutils [req-d3d6e66e-8af7-4d60-8bfb-bb15b4238a02 req-85d7d212-02ae-4861-af7a-be277a496d28 service nova] Releasing lock "refresh_cache-5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.453106] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.453389] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.453651] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.353428] env[68282]: DEBUG oslo_concurrency.lockutils [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "3653a48c-6da3-488a-9b7c-b722032e71ce" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.420278] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "9714bdd8-86ed-47eb-b703-efffe592aaf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.918836] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquiring lock "c2cb0b72-896b-46c6-bb41-90cded35468b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.239011] env[68282]: DEBUG oslo_concurrency.lockutils [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquiring lock "25eddb82-c2b3-499f-afe0-5141b4624342" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.504182] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "650fcdae-dc95-4191-9696-3b6f004bdb62" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.335784] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquiring lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.225548] env[68282]: DEBUG oslo_concurrency.lockutils [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquiring lock "42977331-21c5-4169-889f-37dfbb10b6ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.916859] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.729562] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 
tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquiring lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.259997] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquiring lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.616844] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "eeb7149b-8d07-4968-9089-d6278c4565e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.617162] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.333772] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0ad470c5-fed7-4161-9939-69e2b2328fea tempest-ServersTestFqdnHostnames-663221872 tempest-ServersTestFqdnHostnames-663221872-project-member] Acquiring lock "c4ee8ebc-d940-4ada-b5ec-9a74439d3e87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.334097] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0ad470c5-fed7-4161-9939-69e2b2328fea tempest-ServersTestFqdnHostnames-663221872 tempest-ServersTestFqdnHostnames-663221872-project-member] Lock "c4ee8ebc-d940-4ada-b5ec-9a74439d3e87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.272126] env[68282]: WARNING oslo_vmware.rw_handles [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 941.272126] env[68282]: 
ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 941.272126] env[68282]: ERROR oslo_vmware.rw_handles [ 941.272843] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 941.274264] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 941.274547] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Copying Virtual Disk [datastore2] vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/47b534df-72b6-446b-97a5-b4e1303306c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 941.274871] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5c426bd-8d13-4f91-bf12-5fa313fad82e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.283221] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for the task: (returnval){ [ 941.283221] env[68282]: value = "task-3470478" [ 941.283221] env[68282]: _type = "Task" [ 941.283221] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.291523] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': task-3470478, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.798699] env[68282]: DEBUG oslo_vmware.exceptions [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 941.800299] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.800299] env[68282]: ERROR nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.800299] env[68282]: Faults: ['InvalidArgument'] [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Traceback (most recent call last): [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] yield resources [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] self.driver.spawn(context, instance, image_meta, [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] self._fetch_image_if_missing(context, vi) [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] image_cache(vi, tmp_image_ds_loc) [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] vm_util.copy_virtual_disk( [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] session._wait_for_task(vmdk_copy_task) [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] return self.wait_for_task(task_ref) [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] return evt.wait() [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] result = hub.switch() [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] return self.greenlet.switch() [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] self.f(*self.args, **self.kw) [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] raise exceptions.translate_fault(task_info.error) [ 941.800299] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 941.801510] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Faults: ['InvalidArgument'] [ 941.801510] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] [ 941.801510] env[68282]: INFO nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Terminating instance [ 941.801943] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.802293] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.802662] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bedd19f-10af-4656-b9b8-4cbe1bd3290f {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.806952] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 941.807188] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 941.808525] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abac80b-1377-4f3d-b725-18b08f53dca1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.818614] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 941.819141] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93899fa3-bca8-49b1-987e-eed3e7eec025 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.820659] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.820836] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 941.821547] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f7ce73e-7c6b-4468-928d-150f7cbc6f8b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.826776] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for the task: (returnval){ [ 941.826776] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52fc3c2d-e8aa-1952-c4e1-3d2767ca16ad" [ 941.826776] env[68282]: _type = "Task" [ 941.826776] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.836609] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52fc3c2d-e8aa-1952-c4e1-3d2767ca16ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.893022] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.893248] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.893427] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Deleting the datastore file [datastore2] 9714bdd8-86ed-47eb-b703-efffe592aaf5 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.893698] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7509109c-f828-42b1-b570-54e015119967 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.900726] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for the task: (returnval){ [ 941.900726] env[68282]: value = "task-3470480" [ 941.900726] env[68282]: _type = "Task" [ 941.900726] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.910843] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': task-3470480, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.339621] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 942.340059] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Creating directory with path [datastore2] vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.340781] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd1c5ff9-2bde-4938-a4cb-ed20a6a2c119 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.354793] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Created directory with path [datastore2] vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.354997] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Fetch image to [datastore2] vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 942.355187] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 942.355956] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18621eeb-7eb2-42ac-a7a1-bf622d6cc183 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.365346] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b553b859-e641-4a3c-9e87-2bbf7cc90328 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.375958] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db743159-036f-4a09-8236-ce8961087697 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.414399] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbdcfb43-75e3-47e2-886e-ed5fc26e8698 
{{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.422314] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': task-3470480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072293} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.423654] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.423951] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 942.423951] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 942.424822] env[68282]: INFO nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 942.426039] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a509a776-4d69-489d-9feb-00a8fa7ba76f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.428402] env[68282]: DEBUG nova.compute.claims [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 942.428402] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.428503] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.453500] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 942.522920] env[68282]: DEBUG oslo_vmware.rw_handles [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 942.590120] env[68282]: DEBUG oslo_vmware.rw_handles [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 942.590326] env[68282]: DEBUG oslo_vmware.rw_handles [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 942.969103] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d69d78-35cb-46ae-9dc5-2128ae76c2bc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.980689] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e819338a-6c2f-4e3d-aeff-e325d8e816ec {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.017304] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f829fb4a-4c98-4ad8-925e-8f67339617e3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.025277] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc01b239-69d6-4199-b634-0a129577d5c7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.039571] env[68282]: DEBUG nova.compute.provider_tree [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.051141] env[68282]: DEBUG nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 943.079277] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.651s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.080013] env[68282]: ERROR nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 943.080013] env[68282]: Faults: ['InvalidArgument'] [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Traceback (most recent call last): [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 943.080013] env[68282]: ERROR 
nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] self.driver.spawn(context, instance, image_meta, [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] self._fetch_image_if_missing(context, vi) [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] image_cache(vi, tmp_image_ds_loc) [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] vm_util.copy_virtual_disk( [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] session._wait_for_task(vmdk_copy_task) [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] return self.wait_for_task(task_ref) [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] return evt.wait() [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] result = hub.switch() [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] return self.greenlet.switch() [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] self.f(*self.args, **self.kw) [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] raise exceptions.translate_fault(task_info.error) [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Faults: ['InvalidArgument'] [ 943.080013] env[68282]: ERROR nova.compute.manager [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] [ 943.081179] env[68282]: DEBUG nova.compute.utils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 943.082489] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Build of instance 9714bdd8-86ed-47eb-b703-efffe592aaf5 was re-scheduled: A specified parameter was not correct: fileType [ 943.082489] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 943.083089] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 943.083495] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 943.083819] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 943.083927] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 943.906040] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.918072] env[68282]: INFO nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Took 0.83 seconds to deallocate network for instance. [ 944.066204] env[68282]: INFO nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Deleted allocations for instance 9714bdd8-86ed-47eb-b703-efffe592aaf5 [ 944.117947] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "9714bdd8-86ed-47eb-b703-efffe592aaf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.177s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.120570] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "9714bdd8-86ed-47eb-b703-efffe592aaf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 37.699s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.120570] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "9714bdd8-86ed-47eb-b703-efffe592aaf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.120570] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "9714bdd8-86ed-47eb-b703-efffe592aaf5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.120570] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "9714bdd8-86ed-47eb-b703-efffe592aaf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.123200] env[68282]: INFO nova.compute.manager [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Terminating instance [ 944.125082] env[68282]: DEBUG nova.compute.manager [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 944.125284] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 944.125776] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-caacab70-cf5d-47c9-b4dd-997f06d78b2c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.135504] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5edb35-806e-42c2-9cd6-c221b7eab769 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.148064] env[68282]: DEBUG nova.compute.manager [None req-744ae292-6e38-4398-adef-ae8782e07d64 tempest-TenantUsagesTestJSON-575767908 tempest-TenantUsagesTestJSON-575767908-project-member] [instance: 81731456-9c86-4e6f-ae95-7b7455f322d4] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.171166] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9714bdd8-86ed-47eb-b703-efffe592aaf5 could not be found. [ 944.171845] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 944.171845] env[68282]: INFO nova.compute.manager [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 944.171990] env[68282]: DEBUG oslo.service.loopingcall [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.172436] env[68282]: DEBUG nova.compute.manager [-] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 944.172436] env[68282]: DEBUG nova.network.neutron [-] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 944.180727] env[68282]: DEBUG nova.compute.manager [None req-744ae292-6e38-4398-adef-ae8782e07d64 tempest-TenantUsagesTestJSON-575767908 tempest-TenantUsagesTestJSON-575767908-project-member] [instance: 81731456-9c86-4e6f-ae95-7b7455f322d4] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.218172] env[68282]: DEBUG nova.network.neutron [-] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.230142] env[68282]: DEBUG oslo_concurrency.lockutils [None req-744ae292-6e38-4398-adef-ae8782e07d64 tempest-TenantUsagesTestJSON-575767908 tempest-TenantUsagesTestJSON-575767908-project-member] Lock "81731456-9c86-4e6f-ae95-7b7455f322d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.645s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.231038] env[68282]: INFO nova.compute.manager [-] [instance: 9714bdd8-86ed-47eb-b703-efffe592aaf5] Took 0.06 seconds to deallocate network for instance. [ 944.243558] env[68282]: DEBUG nova.compute.manager [None req-75666080-7980-4dd5-8d63-eea322118851 tempest-VolumesAssistedSnapshotsTest-988209846 tempest-VolumesAssistedSnapshotsTest-988209846-project-member] [instance: 85d9f1dc-0d1e-44e0-92cc-7d7511acb786] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.274825] env[68282]: DEBUG nova.compute.manager [None req-75666080-7980-4dd5-8d63-eea322118851 tempest-VolumesAssistedSnapshotsTest-988209846 tempest-VolumesAssistedSnapshotsTest-988209846-project-member] [instance: 85d9f1dc-0d1e-44e0-92cc-7d7511acb786] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.317049] env[68282]: DEBUG oslo_concurrency.lockutils [None req-75666080-7980-4dd5-8d63-eea322118851 tempest-VolumesAssistedSnapshotsTest-988209846 tempest-VolumesAssistedSnapshotsTest-988209846-project-member] Lock "85d9f1dc-0d1e-44e0-92cc-7d7511acb786" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.208s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.339327] env[68282]: DEBUG nova.compute.manager [None req-b188c1ae-2041-4aaf-ab91-9d222af0405b tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9170a530-958f-46ec-a36c-d19baac1869c] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.380585] env[68282]: DEBUG nova.compute.manager [None req-b188c1ae-2041-4aaf-ab91-9d222af0405b tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9170a530-958f-46ec-a36c-d19baac1869c] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.411476] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b188c1ae-2041-4aaf-ab91-9d222af0405b tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "9170a530-958f-46ec-a36c-d19baac1869c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.186s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.431955] env[68282]: DEBUG nova.compute.manager [None req-9c279fec-af52-481d-8ca4-31f301dd3e87 tempest-ServersV294TestFqdnHostnames-1599076239 tempest-ServersV294TestFqdnHostnames-1599076239-project-member] [instance: a4fed27d-e797-42d1-b738-8f24ebd708ac] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.443461] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e08b344d-dc88-45c2-bf00-abe399971790 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "9714bdd8-86ed-47eb-b703-efffe592aaf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.323s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.470496] env[68282]: DEBUG nova.compute.manager [None req-9c279fec-af52-481d-8ca4-31f301dd3e87 tempest-ServersV294TestFqdnHostnames-1599076239 tempest-ServersV294TestFqdnHostnames-1599076239-project-member] [instance: a4fed27d-e797-42d1-b738-8f24ebd708ac] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.504931] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9c279fec-af52-481d-8ca4-31f301dd3e87 tempest-ServersV294TestFqdnHostnames-1599076239 tempest-ServersV294TestFqdnHostnames-1599076239-project-member] Lock "a4fed27d-e797-42d1-b738-8f24ebd708ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.376s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.539956] env[68282]: DEBUG nova.compute.manager [None req-b292cb6a-f0f5-452b-97c8-7f88e0bc1cee tempest-InstanceActionsV221TestJSON-1450215027 tempest-InstanceActionsV221TestJSON-1450215027-project-member] [instance: 157e19df-fe8f-4287-9c5a-03eefdf05aa4] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.574853] env[68282]: DEBUG nova.compute.manager [None req-b292cb6a-f0f5-452b-97c8-7f88e0bc1cee tempest-InstanceActionsV221TestJSON-1450215027 tempest-InstanceActionsV221TestJSON-1450215027-project-member] [instance: 157e19df-fe8f-4287-9c5a-03eefdf05aa4] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.614060] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b292cb6a-f0f5-452b-97c8-7f88e0bc1cee tempest-InstanceActionsV221TestJSON-1450215027 tempest-InstanceActionsV221TestJSON-1450215027-project-member] Lock "157e19df-fe8f-4287-9c5a-03eefdf05aa4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.223s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.639739] env[68282]: DEBUG oslo_concurrency.lockutils [None req-202c3962-c0eb-4c00-a639-7350fb3a83f2 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "15d79a66-cec9-4b7c-9680-ad5c125b4cad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.639739] env[68282]: DEBUG oslo_concurrency.lockutils [None req-202c3962-c0eb-4c00-a639-7350fb3a83f2 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "15d79a66-cec9-4b7c-9680-ad5c125b4cad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.639739] env[68282]: DEBUG nova.compute.manager [None req-2b383ce1-0e36-4c76-be3b-69f570caf383 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 8be63333-3269-4158-8476-a3032a185131] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.669720] env[68282]: DEBUG nova.compute.manager [None req-2b383ce1-0e36-4c76-be3b-69f570caf383 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 8be63333-3269-4158-8476-a3032a185131] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.698873] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2b383ce1-0e36-4c76-be3b-69f570caf383 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "8be63333-3269-4158-8476-a3032a185131" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.666s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.709127] env[68282]: DEBUG nova.compute.manager [None req-2c6175bb-481f-4ccb-a63e-45147280e2dc tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] [instance: 35669e1a-cb77-42be-9e1d-7300ea872d5f] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.745312] env[68282]: DEBUG nova.compute.manager [None req-2c6175bb-481f-4ccb-a63e-45147280e2dc tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] [instance: 35669e1a-cb77-42be-9e1d-7300ea872d5f] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.767893] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2c6175bb-481f-4ccb-a63e-45147280e2dc tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] Lock "35669e1a-cb77-42be-9e1d-7300ea872d5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.907s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.779421] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.834503] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.834767] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.836236] env[68282]: INFO nova.compute.claims [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.384380] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899e9866-bc2d-4437-b403-2d41df32a715 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.392551] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6db691-45ec-4615-8db1-53b5ea05e3ed {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.430064] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de20450d-ec7a-4ebb-8886-a897adafe228 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.435897] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346ef1fa-5803-45d9-989e-2cbe14b0f66b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.450748] env[68282]: DEBUG nova.compute.provider_tree [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.462141] env[68282]: DEBUG nova.scheduler.client.report [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 945.480085] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.645s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.480610] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 945.530328] env[68282]: DEBUG nova.compute.utils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 945.531648] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 945.532279] env[68282]: DEBUG nova.network.neutron [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 945.542349] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 945.619067] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 945.655821] env[68282]: DEBUG nova.policy [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f9cc5527b3344bd1a50828a78b2fc1c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f5dd45dedb740ea808ab73d0ea5480a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 945.664614] env[68282]: DEBUG 
nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 945.664614] env[68282]: DEBUG nova.virt.hardware [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 945.665383] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec530dd-0902-432e-b92f-ea16fa5cd6b2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.674226] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa253c32-02df-4cae-98aa-ea2ce294e02b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.014057] env[68282]: DEBUG oslo_concurrency.lockutils [None req-daa0feb7-bdcc-4451-83e8-3d326684ec92 tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] Acquiring lock "994596dc-886c-457e-a634-129e416ce7b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.014451] env[68282]: DEBUG oslo_concurrency.lockutils [None req-daa0feb7-bdcc-4451-83e8-3d326684ec92 tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] Lock "994596dc-886c-457e-a634-129e416ce7b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.879215] env[68282]: DEBUG nova.network.neutron [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Successfully created port: ec9bc8d2-c0b3-467a-855b-281571a64d57 
{{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.741842] env[68282]: DEBUG nova.network.neutron [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Successfully updated port: ec9bc8d2-c0b3-467a-855b-281571a64d57 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.756129] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "refresh_cache-50234924-2933-4a79-9a33-3cb968b6e08a" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.756310] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquired lock "refresh_cache-50234924-2933-4a79-9a33-3cb968b6e08a" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.756465] env[68282]: DEBUG nova.network.neutron [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 949.046204] env[68282]: DEBUG nova.network.neutron [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 949.134278] env[68282]: DEBUG nova.compute.manager [req-e0305702-447f-4694-80ca-3f0f58e387c4 req-418ffe84-ae09-4073-8b69-30ba3c9d360e service nova] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Received event network-vif-plugged-ec9bc8d2-c0b3-467a-855b-281571a64d57 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 949.134494] env[68282]: DEBUG oslo_concurrency.lockutils [req-e0305702-447f-4694-80ca-3f0f58e387c4 req-418ffe84-ae09-4073-8b69-30ba3c9d360e service nova] Acquiring lock "50234924-2933-4a79-9a33-3cb968b6e08a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.134699] env[68282]: DEBUG oslo_concurrency.lockutils [req-e0305702-447f-4694-80ca-3f0f58e387c4 req-418ffe84-ae09-4073-8b69-30ba3c9d360e service nova] Lock "50234924-2933-4a79-9a33-3cb968b6e08a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.134868] env[68282]: DEBUG oslo_concurrency.lockutils [req-e0305702-447f-4694-80ca-3f0f58e387c4 req-418ffe84-ae09-4073-8b69-30ba3c9d360e service nova] Lock "50234924-2933-4a79-9a33-3cb968b6e08a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.135393] env[68282]: DEBUG nova.compute.manager [req-e0305702-447f-4694-80ca-3f0f58e387c4 req-418ffe84-ae09-4073-8b69-30ba3c9d360e service nova] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] No waiting events found dispatching network-vif-plugged-ec9bc8d2-c0b3-467a-855b-281571a64d57 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 949.135619] env[68282]: WARNING nova.compute.manager [req-e0305702-447f-4694-80ca-3f0f58e387c4 req-418ffe84-ae09-4073-8b69-30ba3c9d360e service nova] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Received unexpected event network-vif-plugged-ec9bc8d2-c0b3-467a-855b-281571a64d57 for instance with vm_state building and task_state spawning. 
[ 949.472106] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.472490] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.520421] env[68282]: DEBUG nova.network.neutron [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Updating instance_info_cache with network_info: [{"id": "ec9bc8d2-c0b3-467a-855b-281571a64d57", "address": "fa:16:3e:99:fa:8b", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec9bc8d2-c0", "ovs_interfaceid": "ec9bc8d2-c0b3-467a-855b-281571a64d57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.538923] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Releasing lock "refresh_cache-50234924-2933-4a79-9a33-3cb968b6e08a" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.539304] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Instance network_info: |[{"id": "ec9bc8d2-c0b3-467a-855b-281571a64d57", "address": "fa:16:3e:99:fa:8b", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec9bc8d2-c0", "ovs_interfaceid": "ec9bc8d2-c0b3-467a-855b-281571a64d57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 949.540525] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:fa:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a965790c-2d2f-4c2a-9ee7-745f4d53039b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec9bc8d2-c0b3-467a-855b-281571a64d57', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.551967] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Creating folder: Project (6f5dd45dedb740ea808ab73d0ea5480a). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 949.551967] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-234a4801-7ab4-4511-bbf8-c02fe9488654 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.562755] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Created folder: Project (6f5dd45dedb740ea808ab73d0ea5480a) in parent group-v693573. [ 949.562959] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Creating folder: Instances. Parent ref: group-v693618. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 949.563210] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fc6c131-078f-4665-9591-d357c5eb05c5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.572364] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Created folder: Instances in parent group-v693618. 
[ 949.573432] env[68282]: DEBUG oslo.service.loopingcall [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.573432] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 949.573432] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a19ec782-3c58-49db-b6c9-3142b3d1afaf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.592647] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.592647] env[68282]: value = "task-3470483" [ 949.592647] env[68282]: _type = "Task" [ 949.592647] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.605094] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470483, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.088018] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.088594] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11263}} [ 950.107369] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470483, 'name': CreateVM_Task, 'duration_secs': 0.29359} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.108962] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 950.109766] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] There are 0 instances to clean {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11272}} [ 950.111103] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.111103] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.111103] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 950.111541] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.111703] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances with incomplete migration {{(pid=68282) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11301}} [ 950.112628] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1436039-23ca-40c1-8ef0-12d79b52718b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.122965] env[68282]: DEBUG oslo_vmware.api [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Waiting for the task: (returnval){ [ 950.122965] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5214e858-b4fd-3468-a914-866563be7d4d" [ 950.122965] env[68282]: _type = "Task" [ 950.122965] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.128706] env[68282]: DEBUG oslo_vmware.api [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5214e858-b4fd-3468-a914-866563be7d4d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.129781] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 950.634253] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.634613] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.635253] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.181571] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "50234924-2933-4a79-9a33-3cb968b6e08a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.538801] env[68282]: DEBUG nova.compute.manager [req-44f532c7-5d8e-47e7-80c7-874be9180e50 req-ac8fb5fc-dca5-41bc-843e-c21602f04374 service nova] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Received event network-changed-ec9bc8d2-c0b3-467a-855b-281571a64d57 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 951.538987] env[68282]: DEBUG nova.compute.manager [req-44f532c7-5d8e-47e7-80c7-874be9180e50 req-ac8fb5fc-dca5-41bc-843e-c21602f04374 service nova] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Refreshing instance network info cache due to event network-changed-ec9bc8d2-c0b3-467a-855b-281571a64d57. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 951.539855] env[68282]: DEBUG oslo_concurrency.lockutils [req-44f532c7-5d8e-47e7-80c7-874be9180e50 req-ac8fb5fc-dca5-41bc-843e-c21602f04374 service nova] Acquiring lock "refresh_cache-50234924-2933-4a79-9a33-3cb968b6e08a" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.540051] env[68282]: DEBUG oslo_concurrency.lockutils [req-44f532c7-5d8e-47e7-80c7-874be9180e50 req-ac8fb5fc-dca5-41bc-843e-c21602f04374 service nova] Acquired lock "refresh_cache-50234924-2933-4a79-9a33-3cb968b6e08a" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.540241] env[68282]: DEBUG nova.network.neutron [req-44f532c7-5d8e-47e7-80c7-874be9180e50 req-ac8fb5fc-dca5-41bc-843e-c21602f04374 service nova] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Refreshing network info cache for port ec9bc8d2-c0b3-467a-855b-281571a64d57 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 951.558021] env[68282]: DEBUG oslo_concurrency.lockutils [None req-cf94b487-547a-4ad9-b72c-329a4962d7a4 tempest-InstanceActionsNegativeTestJSON-1875884134 tempest-InstanceActionsNegativeTestJSON-1875884134-project-member] Acquiring lock "7be3eafd-b91b-4165-966c-aa056ea1a2dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.558189] env[68282]: DEBUG oslo_concurrency.lockutils [None req-cf94b487-547a-4ad9-b72c-329a4962d7a4 tempest-InstanceActionsNegativeTestJSON-1875884134 tempest-InstanceActionsNegativeTestJSON-1875884134-project-member] Lock "7be3eafd-b91b-4165-966c-aa056ea1a2dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.842133] env[68282]: DEBUG nova.network.neutron [req-44f532c7-5d8e-47e7-80c7-874be9180e50 req-ac8fb5fc-dca5-41bc-843e-c21602f04374 service nova] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Updated VIF entry in instance network info cache for port ec9bc8d2-c0b3-467a-855b-281571a64d57. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 951.842517] env[68282]: DEBUG nova.network.neutron [req-44f532c7-5d8e-47e7-80c7-874be9180e50 req-ac8fb5fc-dca5-41bc-843e-c21602f04374 service nova] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Updating instance_info_cache with network_info: [{"id": "ec9bc8d2-c0b3-467a-855b-281571a64d57", "address": "fa:16:3e:99:fa:8b", "network": {"id": "d6dadeef-7a4e-4fe5-8734-9fdf459f4a62", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.147", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4b9c04f548524c84be5a344a65dca318", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a965790c-2d2f-4c2a-9ee7-745f4d53039b", "external-id": "nsx-vlan-transportzone-708", "segmentation_id": 708, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec9bc8d2-c0", "ovs_interfaceid": "ec9bc8d2-c0b3-467a-855b-281571a64d57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.856464] env[68282]: DEBUG oslo_concurrency.lockutils [req-44f532c7-5d8e-47e7-80c7-874be9180e50 req-ac8fb5fc-dca5-41bc-843e-c21602f04374 service nova] Releasing lock "refresh_cache-50234924-2933-4a79-9a33-3cb968b6e08a" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.139035] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 952.156836] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.157122] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.157305] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.157465] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 952.158631] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fdbe5c-da55-4da0-9122-62307ad5b0ea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.167924] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51369620-b4af-4bc4-b5eb-169d8e03f1c3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.183750] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c023a3-a520-48d6-ac4f-823eefac5c42 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.190632] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254f4a07-1902-425d-be5e-523b91c80db0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.221252] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180939MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 952.221421] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.221631] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.307366] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3653a48c-6da3-488a-9b7c-b722032e71ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.307544] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c2cb0b72-896b-46c6-bb41-90cded35468b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.307678] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25eddb82-c2b3-499f-afe0-5141b4624342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.307804] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.307927] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 650fcdae-dc95-4191-9696-3b6f004bdb62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.308056] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 42977331-21c5-4169-889f-37dfbb10b6ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.308181] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.308299] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.308416] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.308533] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.320375] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance ac165200-e27f-4d58-83db-419b21a80862 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.331214] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3289ea9a-950e-4baa-8423-c00250207ef9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.343023] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3bc646fe-4a97-4fff-a5ae-54b62a292c21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.352796] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.364279] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9ab68d2f-1a57-465d-8f18-bb3a81946499 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.375026] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c6abb06-dac1-4579-8fb1-4ea95a2240d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.386830] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance af2dfe54-26e1-46c2-984d-94ce7e65cef1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.396232] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c5a6776c-88d0-49d2-8e02-05fd21161b44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.407744] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7dd91b4e-e702-4c77-a8a4-3dd7f29b3bcc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.420259] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.430875] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.440981] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b338f0ef-8361-40de-b45b-309cb87a17e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.452201] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.467416] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c4ee8ebc-d940-4ada-b5ec-9a74439d3e87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.477773] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 15d79a66-cec9-4b7c-9680-ad5c125b4cad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.487411] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 994596dc-886c-457e-a634-129e416ce7b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.499304] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9744e475-d8e8-48f7-85e3-c888ab6f25a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.511179] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7be3eafd-b91b-4165-966c-aa056ea1a2dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.511420] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 952.511591] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 952.851520] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311300ba-b5e7-4c0c-b668-d0c47c4c1182 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.859094] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2293b73b-ef92-484b-aaa3-2866bd76f0f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.888301] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff402c5-31c9-42b8-b8b2-6832fbd42250 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.895373] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ad0a8b-ded5-44be-b4a2-fb25df824230 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.908216] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 
1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.917219] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 952.936763] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 952.936977] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.715s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.885217] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.083098] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.086899] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.086995] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 955.087319] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 955.087611] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 956.088752] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.087108] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.087342] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 957.087481] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 957.111530] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.111837] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.111877] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.112020] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.112137] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.112263] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.112385] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.112501] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.112617] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.112748] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 957.112902] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 957.113441] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 961.595332] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03f11d3c-001a-4051-9ca9-51911c825938 tempest-ServersAdminNegativeTestJSON-813769931 tempest-ServersAdminNegativeTestJSON-813769931-project-member] Acquiring lock "4af66512-25b8-495c-8217-4b99d6db34d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.595999] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03f11d3c-001a-4051-9ca9-51911c825938 tempest-ServersAdminNegativeTestJSON-813769931 tempest-ServersAdminNegativeTestJSON-813769931-project-member] Lock "4af66512-25b8-495c-8217-4b99d6db34d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.817169] env[68282]: DEBUG oslo_concurrency.lockutils [None req-76f65f3f-b2b7-4c64-b8ca-72c5fd727571 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] Acquiring lock "8f42c3dc-715f-4e0a-b826-6917a74d85f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.817487] env[68282]: DEBUG oslo_concurrency.lockutils [None req-76f65f3f-b2b7-4c64-b8ca-72c5fd727571 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] Lock "8f42c3dc-715f-4e0a-b826-6917a74d85f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.954406] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-a86f7592-9d39-4fb1-a7fb-3c82ac644ec3 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] Acquiring lock "d18f6406-84a4-42c8-9508-50cf79fff0bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.954701] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a86f7592-9d39-4fb1-a7fb-3c82ac644ec3 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] Lock "d18f6406-84a4-42c8-9508-50cf79fff0bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.552043] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0073bd9f-39ce-4189-aa5e-650146d564e3 tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Acquiring lock "768a616d-caac-48e2-8d6e-efe4c7e544a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.552363] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0073bd9f-39ce-4189-aa5e-650146d564e3 tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Lock "768a616d-caac-48e2-8d6e-efe4c7e544a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.815598] env[68282]: DEBUG oslo_concurrency.lockutils [None req-dbbf8853-44cf-48c7-b201-8c76e7cfa3a7 tempest-ServerRescueTestJSON-1990750847 tempest-ServerRescueTestJSON-1990750847-project-member] Acquiring lock "25a8a015-8e33-4db5-a110-60cccf150165" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.815939] env[68282]: DEBUG oslo_concurrency.lockutils [None req-dbbf8853-44cf-48c7-b201-8c76e7cfa3a7 tempest-ServerRescueTestJSON-1990750847 tempest-ServerRescueTestJSON-1990750847-project-member] Lock "25a8a015-8e33-4db5-a110-60cccf150165" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.418774] env[68282]: DEBUG oslo_concurrency.lockutils [None req-68b797c5-d5fc-4c68-a2bc-59bf4d6ad392 tempest-ServersTestManualDisk-1673873711 tempest-ServersTestManualDisk-1673873711-project-member] Acquiring lock "d1a690ad-5c57-46c7-895f-025c787e5526" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.419155] env[68282]: DEBUG oslo_concurrency.lockutils [None req-68b797c5-d5fc-4c68-a2bc-59bf4d6ad392 tempest-ServersTestManualDisk-1673873711 tempest-ServersTestManualDisk-1673873711-project-member] Lock "d1a690ad-5c57-46c7-895f-025c787e5526" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.744561] env[68282]: DEBUG oslo_concurrency.lockutils [None req-55de8ccd-88a4-4406-afd4-1714b8bf6599 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] Acquiring lock "c7247022-d5a2-41b4-a378-f056e7429c2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.744855] env[68282]: DEBUG oslo_concurrency.lockutils [None req-55de8ccd-88a4-4406-afd4-1714b8bf6599 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] Lock "c7247022-d5a2-41b4-a378-f056e7429c2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.791867] env[68282]: WARNING oslo_vmware.rw_handles [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 990.791867] env[68282]: ERROR oslo_vmware.rw_handles [ 990.792570] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 990.794117] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Caching image {{(pid=68282) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 990.794449] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Copying Virtual Disk [datastore2] vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/0aa9449b-ff35-4814-9231-5ff9aff91a37/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 990.794761] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-235a4b92-dac6-4752-b0d3-c2b2831047b0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.804849] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for the task: (returnval){ [ 990.804849] env[68282]: value = "task-3470484" [ 990.804849] env[68282]: _type = "Task" [ 990.804849] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.814196] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': task-3470484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.317651] env[68282]: DEBUG oslo_vmware.exceptions [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 991.317651] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.319389] env[68282]: ERROR nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 991.319389] env[68282]: Faults: ['InvalidArgument'] [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Traceback (most recent call last): [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] yield resources [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] self.driver.spawn(context, instance, image_meta, [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] self._fetch_image_if_missing(context, vi) [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] image_cache(vi, tmp_image_ds_loc) [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] vm_util.copy_virtual_disk( [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] session._wait_for_task(vmdk_copy_task) [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] return self.wait_for_task(task_ref) [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] return evt.wait() [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] result = hub.switch() [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] return self.greenlet.switch() [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] self.f(*self.args, **self.kw) [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] raise exceptions.translate_fault(task_info.error) [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Faults: ['InvalidArgument'] [ 991.319389] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] [ 991.319389] env[68282]: INFO nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Terminating instance [ 991.320517] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.320517] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.320691] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0a4efd4-732a-439a-a592-74a1840f6777 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.323393] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 991.324017] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 991.324494] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7211b2d1-f146-4927-9db8-2fc653b99860 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.331751] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 991.332709] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2081af07-0a11-4465-88c5-3fbb50196f93 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.334090] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.335024] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 991.335024] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25169f72-b919-4072-a8db-2f162ca21b7b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.339961] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Waiting for the task: (returnval){ [ 991.339961] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5270f4a9-b5b6-66d8-2783-b9a63e10494c" [ 991.339961] env[68282]: _type = "Task" [ 991.339961] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.347053] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5270f4a9-b5b6-66d8-2783-b9a63e10494c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.406228] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 991.406462] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 991.406642] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Deleting the datastore file [datastore2] 3653a48c-6da3-488a-9b7c-b722032e71ce {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 991.406901] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6810852d-62f8-4216-8baa-d59b75ee2d9d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.413220] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for the task: (returnval){ [ 991.413220] env[68282]: value = "task-3470486" [ 991.413220] env[68282]: _type = "Task" [ 991.413220] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.422257] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': task-3470486, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.850719] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 991.851024] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Creating directory with path [datastore2] vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.851235] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c412951-297d-4273-9866-7cf43ffb168b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.862020] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Created directory with path [datastore2] vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.862271] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Fetch image to [datastore2] vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 991.862408] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 991.863164] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716ecc69-33d1-44fc-8021-2b76e5301ab0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.869470] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab5b987-cb77-4f4d-b3d5-7ed77e17ff8d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.878155] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c62014e-690d-4928-88ee-74b189ca8f24 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.909517] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbb3275-3c50-4cd3-b59e-63447bb58887 {{(pid=68282) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.917681] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-50ab9e2e-733e-40cb-a6c9-40eacaa5f253 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.923768] env[68282]: DEBUG oslo_vmware.api [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Task: {'id': task-3470486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072573} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.924027] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.924224] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 991.924406] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 991.924578] env[68282]: INFO nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Took 0.60 seconds to destroy the instance on the hypervisor. 
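Both the failed image copy in the traceback above and the datastore-file cleanup that follows trace the same oslo.vmware pattern: a vSphere method is invoked through the session, a Task reference comes back, and the session polls it until it completes or raises the translated fault (the InvalidArgument/"fileType" fault above is such a translation surfacing from _poll_task). A minimal sketch of that pattern using the DeleteDatastoreFile_Task call seen above; the vCenter host, credentials, datastore path and the datacenter argument are placeholders for illustration, not values taken from this deployment:

    # Sketch only: invoke-and-wait pattern behind the entries above.
    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',   # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    # Logged as: "Invoking FileManager.DeleteDatastoreFile_Task with opID=..."
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] some-instance-dir',     # placeholder path
        datacenter=None)                           # Nova passes the real Datacenter moref here
    try:
        # Logged as the "Task: {...} progress is 0%" poll loop, then
        # "completed successfully" or a raised, translated fault.
        session.wait_for_task(task)
    except vexc.VimFaultException as exc:
        print(exc.fault_list, str(exc))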
[ 991.926696] env[68282]: DEBUG nova.compute.claims [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 991.926867] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.927089] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.943538] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 991.996181] env[68282]: DEBUG oslo_vmware.rw_handles [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 992.054627] env[68282]: DEBUG nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Refreshing inventories for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 992.058648] env[68282]: DEBUG oslo_vmware.rw_handles [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 992.058824] env[68282]: DEBUG oslo_vmware.rw_handles [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 992.071370] env[68282]: DEBUG nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Updating ProviderTree inventory for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 992.071580] env[68282]: DEBUG nova.compute.provider_tree [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Updating inventory in ProviderTree for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 992.082671] env[68282]: DEBUG nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Refreshing aggregate associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, aggregates: None {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 992.099937] env[68282]: DEBUG nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Refreshing trait associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 992.392967] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc64e73-d9f5-44ea-8ba1-f34ca260a0f2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.400326] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11022fc9-e110-4b94-afcb-893226e50ed5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.429364] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbb9eea-ea13-46b2-98a7-73fed57ee357 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.436454] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4f5539-d3f0-4480-8c83-a3a20d7c6b1c {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.449568] env[68282]: DEBUG nova.compute.provider_tree [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.459426] env[68282]: DEBUG nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 992.473445] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.546s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.473979] env[68282]: ERROR nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 992.473979] env[68282]: Faults: ['InvalidArgument'] [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Traceback (most recent call last): [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] self.driver.spawn(context, instance, image_meta, [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] self._fetch_image_if_missing(context, vi) [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] image_cache(vi, tmp_image_ds_loc) [ 992.473979] 
env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] vm_util.copy_virtual_disk( [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] session._wait_for_task(vmdk_copy_task) [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] return self.wait_for_task(task_ref) [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] return evt.wait() [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] result = hub.switch() [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] return self.greenlet.switch() [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] self.f(*self.args, **self.kw) [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] raise exceptions.translate_fault(task_info.error) [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Faults: ['InvalidArgument'] [ 992.473979] env[68282]: ERROR nova.compute.manager [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] [ 992.474906] env[68282]: DEBUG nova.compute.utils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 992.476420] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 
tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Build of instance 3653a48c-6da3-488a-9b7c-b722032e71ce was re-scheduled: A specified parameter was not correct: fileType [ 992.476420] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 992.476795] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 992.476969] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 992.477158] env[68282]: DEBUG nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 992.477370] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 992.800406] env[68282]: DEBUG nova.network.neutron [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.810803] env[68282]: INFO nova.compute.manager [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Took 0.33 seconds to deallocate network for instance. 
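The recurring "Acquiring lock ... by ...", "Lock ... acquired by ... waited", and "released by ... held" lines throughout this log come from oslo.concurrency's lockutils wrapper that Nova places around critical sections such as the resource-tracker claim code. A minimal, self-contained equivalent of that pattern (the function name and body here are illustrative, not Nova's actual code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        # Runs only while the local "compute_resources" lock is held; the
        # decorator's wrapper emits the "acquired by ... waited" and
        # "released by ... held" DEBUG lines around this body.
        print('aborting claim for', instance_uuid)

    abort_instance_claim('3653a48c-6da3-488a-9b7c-b722032e71ce')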
[ 992.902297] env[68282]: INFO nova.scheduler.client.report [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Deleted allocations for instance 3653a48c-6da3-488a-9b7c-b722032e71ce [ 992.924456] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bcff10e0-4165-4210-8649-c1b0f7fd9dcb tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "3653a48c-6da3-488a-9b7c-b722032e71ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 285.059s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.925739] env[68282]: DEBUG oslo_concurrency.lockutils [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "3653a48c-6da3-488a-9b7c-b722032e71ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 86.573s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.925989] env[68282]: DEBUG oslo_concurrency.lockutils [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "3653a48c-6da3-488a-9b7c-b722032e71ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.926248] env[68282]: DEBUG oslo_concurrency.lockutils [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "3653a48c-6da3-488a-9b7c-b722032e71ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.926429] env[68282]: DEBUG oslo_concurrency.lockutils [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "3653a48c-6da3-488a-9b7c-b722032e71ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.929294] env[68282]: INFO nova.compute.manager [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Terminating instance [ 992.931274] env[68282]: DEBUG nova.compute.manager [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 992.931500] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 992.932044] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3642f26-1db3-43ee-9836-46f38464df59 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.938288] env[68282]: DEBUG nova.compute.manager [None req-2fca969e-fbb6-49c3-8caa-20f7681745da tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: ada46260-9977-4009-a4f1-c08f2222b6e8] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 992.946142] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e243d2-39c1-4e2b-abc5-cb76a31ce980 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.963939] env[68282]: DEBUG nova.compute.manager [None req-2fca969e-fbb6-49c3-8caa-20f7681745da tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: ada46260-9977-4009-a4f1-c08f2222b6e8] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 992.981663] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3653a48c-6da3-488a-9b7c-b722032e71ce could not be found. [ 992.982072] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 992.982449] env[68282]: INFO nova.compute.manager [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Took 0.05 seconds to destroy the instance on the hypervisor. [ 992.983530] env[68282]: DEBUG oslo.service.loopingcall [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.983530] env[68282]: DEBUG nova.compute.manager [-] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 992.983530] env[68282]: DEBUG nova.network.neutron [-] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 993.001128] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2fca969e-fbb6-49c3-8caa-20f7681745da tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "ada46260-9977-4009-a4f1-c08f2222b6e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.495s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.012227] env[68282]: DEBUG nova.compute.manager [None req-7d7283b4-bd63-45a4-8e7b-0e5ec8360a96 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: ac165200-e27f-4d58-83db-419b21a80862] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.018562] env[68282]: DEBUG nova.network.neutron [-] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.029664] env[68282]: INFO nova.compute.manager [-] [instance: 3653a48c-6da3-488a-9b7c-b722032e71ce] Took 0.05 seconds to deallocate network for instance. [ 993.061752] env[68282]: DEBUG nova.compute.manager [None req-7d7283b4-bd63-45a4-8e7b-0e5ec8360a96 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] [instance: ac165200-e27f-4d58-83db-419b21a80862] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.090068] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7d7283b4-bd63-45a4-8e7b-0e5ec8360a96 tempest-DeleteServersAdminTestJSON-159281533 tempest-DeleteServersAdminTestJSON-159281533-project-member] Lock "ac165200-e27f-4d58-83db-419b21a80862" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.819s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.099250] env[68282]: DEBUG nova.compute.manager [None req-67e5bd5e-11af-418f-b320-9cc2a7d36fac tempest-ServerActionsTestOtherA-1546743166 tempest-ServerActionsTestOtherA-1546743166-project-member] [instance: 3289ea9a-950e-4baa-8423-c00250207ef9] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.128526] env[68282]: DEBUG nova.compute.manager [None req-67e5bd5e-11af-418f-b320-9cc2a7d36fac tempest-ServerActionsTestOtherA-1546743166 tempest-ServerActionsTestOtherA-1546743166-project-member] [instance: 3289ea9a-950e-4baa-8423-c00250207ef9] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.146822] env[68282]: DEBUG oslo_concurrency.lockutils [None req-703ced61-689c-4d7b-9e6f-f9d91057b1f7 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "3653a48c-6da3-488a-9b7c-b722032e71ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.156807] env[68282]: DEBUG oslo_concurrency.lockutils [None req-67e5bd5e-11af-418f-b320-9cc2a7d36fac tempest-ServerActionsTestOtherA-1546743166 tempest-ServerActionsTestOtherA-1546743166-project-member] Lock "3289ea9a-950e-4baa-8423-c00250207ef9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.803s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.165384] env[68282]: DEBUG nova.compute.manager [None req-078984b5-d61f-412f-8322-3e6e29dd5f3e tempest-ServerMetadataNegativeTestJSON-1697762306 tempest-ServerMetadataNegativeTestJSON-1697762306-project-member] [instance: 3bc646fe-4a97-4fff-a5ae-54b62a292c21] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.191913] env[68282]: DEBUG nova.compute.manager [None req-078984b5-d61f-412f-8322-3e6e29dd5f3e tempest-ServerMetadataNegativeTestJSON-1697762306 tempest-ServerMetadataNegativeTestJSON-1697762306-project-member] [instance: 3bc646fe-4a97-4fff-a5ae-54b62a292c21] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.213881] env[68282]: DEBUG oslo_concurrency.lockutils [None req-078984b5-d61f-412f-8322-3e6e29dd5f3e tempest-ServerMetadataNegativeTestJSON-1697762306 tempest-ServerMetadataNegativeTestJSON-1697762306-project-member] Lock "3bc646fe-4a97-4fff-a5ae-54b62a292c21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.878s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.222569] env[68282]: DEBUG nova.compute.manager [None req-532c7884-863e-439f-97ec-f5cb89a902ac tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] [instance: fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.248837] env[68282]: DEBUG nova.compute.manager [None req-532c7884-863e-439f-97ec-f5cb89a902ac tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] [instance: fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.271253] env[68282]: DEBUG oslo_concurrency.lockutils [None req-532c7884-863e-439f-97ec-f5cb89a902ac tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Lock "fd2ab355-0a3a-4a65-9f10-4ea3ba40dc28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.757s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.281036] env[68282]: DEBUG nova.compute.manager [None req-11e79fb4-3c3b-4678-9ae5-1fef2a754b41 tempest-ServersTestBootFromVolume-335009676 tempest-ServersTestBootFromVolume-335009676-project-member] [instance: 9ab68d2f-1a57-465d-8f18-bb3a81946499] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.305452] env[68282]: DEBUG nova.compute.manager [None req-11e79fb4-3c3b-4678-9ae5-1fef2a754b41 tempest-ServersTestBootFromVolume-335009676 tempest-ServersTestBootFromVolume-335009676-project-member] [instance: 9ab68d2f-1a57-465d-8f18-bb3a81946499] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.325599] env[68282]: DEBUG oslo_concurrency.lockutils [None req-11e79fb4-3c3b-4678-9ae5-1fef2a754b41 tempest-ServersTestBootFromVolume-335009676 tempest-ServersTestBootFromVolume-335009676-project-member] Lock "9ab68d2f-1a57-465d-8f18-bb3a81946499" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.830s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.333889] env[68282]: DEBUG nova.compute.manager [None req-4e6be5d4-6950-4048-a7ac-6f377ebed448 tempest-ServersNegativeTestJSON-1233658316 tempest-ServersNegativeTestJSON-1233658316-project-member] [instance: 5c6abb06-dac1-4579-8fb1-4ea95a2240d4] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.362452] env[68282]: DEBUG nova.compute.manager [None req-4e6be5d4-6950-4048-a7ac-6f377ebed448 tempest-ServersNegativeTestJSON-1233658316 tempest-ServersNegativeTestJSON-1233658316-project-member] [instance: 5c6abb06-dac1-4579-8fb1-4ea95a2240d4] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.384317] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4e6be5d4-6950-4048-a7ac-6f377ebed448 tempest-ServersNegativeTestJSON-1233658316 tempest-ServersNegativeTestJSON-1233658316-project-member] Lock "5c6abb06-dac1-4579-8fb1-4ea95a2240d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.189s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.395100] env[68282]: DEBUG nova.compute.manager [None req-2cd043e5-e24e-4272-afb6-b12a5ff0e17e tempest-ServerExternalEventsTest-17229807 tempest-ServerExternalEventsTest-17229807-project-member] [instance: af2dfe54-26e1-46c2-984d-94ce7e65cef1] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.417907] env[68282]: DEBUG nova.compute.manager [None req-2cd043e5-e24e-4272-afb6-b12a5ff0e17e tempest-ServerExternalEventsTest-17229807 tempest-ServerExternalEventsTest-17229807-project-member] [instance: af2dfe54-26e1-46c2-984d-94ce7e65cef1] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.438135] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2cd043e5-e24e-4272-afb6-b12a5ff0e17e tempest-ServerExternalEventsTest-17229807 tempest-ServerExternalEventsTest-17229807-project-member] Lock "af2dfe54-26e1-46c2-984d-94ce7e65cef1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.833s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.447251] env[68282]: DEBUG nova.compute.manager [None req-ad885f7b-b9d3-429a-b082-f8c1c073c95a tempest-ServerDiagnosticsNegativeTest-1976461939 tempest-ServerDiagnosticsNegativeTest-1976461939-project-member] [instance: c5a6776c-88d0-49d2-8e02-05fd21161b44] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.471418] env[68282]: DEBUG nova.compute.manager [None req-ad885f7b-b9d3-429a-b082-f8c1c073c95a tempest-ServerDiagnosticsNegativeTest-1976461939 tempest-ServerDiagnosticsNegativeTest-1976461939-project-member] [instance: c5a6776c-88d0-49d2-8e02-05fd21161b44] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.499710] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ad885f7b-b9d3-429a-b082-f8c1c073c95a tempest-ServerDiagnosticsNegativeTest-1976461939 tempest-ServerDiagnosticsNegativeTest-1976461939-project-member] Lock "c5a6776c-88d0-49d2-8e02-05fd21161b44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.647s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.509527] env[68282]: DEBUG nova.compute.manager [None req-6fa693d6-628d-49f3-9a7f-5f2203067209 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] [instance: 7dd91b4e-e702-4c77-a8a4-3dd7f29b3bcc] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.533938] env[68282]: DEBUG nova.compute.manager [None req-6fa693d6-628d-49f3-9a7f-5f2203067209 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] [instance: 7dd91b4e-e702-4c77-a8a4-3dd7f29b3bcc] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 993.557107] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6fa693d6-628d-49f3-9a7f-5f2203067209 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] Lock "7dd91b4e-e702-4c77-a8a4-3dd7f29b3bcc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.806s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.566181] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 993.625276] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.625546] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.627052] env[68282]: INFO nova.compute.claims [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.970731] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41a3e07-4fe7-469e-8af6-47bcd309ac82 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.978645] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fdcd22-c69e-404f-a178-cdf947ee6eab {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.011434] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85289a0-4e6a-442a-86dc-40b89cdf28b9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.019438] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3baa74b4-cc8f-4b52-9734-578f14138162 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.033439] env[68282]: DEBUG nova.compute.provider_tree [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.042911] 
env[68282]: DEBUG nova.scheduler.client.report [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 994.057927] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.432s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.058522] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 994.098284] env[68282]: DEBUG nova.compute.utils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 994.099561] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Not allocating networking since 'none' was specified. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 994.107849] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 994.177302] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 994.201512] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 994.201803] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 994.202016] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.202226] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 994.202431] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.202600] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 994.202812] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 994.202983] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 994.203418] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c 
tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 994.203771] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 994.203851] env[68282]: DEBUG nova.virt.hardware [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 994.204778] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac395fb8-8c22-407d-8492-46bee57f7e63 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.212630] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e57aa9b-d1cc-4962-bad6-216747d52e80 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.228408] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Instance VIF info [] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.233981] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Creating folder: Project (81d0f2ea568947dbaee90f4e3e8d48a7). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 994.234270] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5e9ad10-a16f-435b-a127-1cf0e6d09288 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.244258] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Created folder: Project (81d0f2ea568947dbaee90f4e3e8d48a7) in parent group-v693573. [ 994.244450] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Creating folder: Instances. Parent ref: group-v693621. 
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 994.244674] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3c1b072-2ba6-4716-9659-eba95e8f4c80 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.253436] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Created folder: Instances in parent group-v693621. [ 994.253665] env[68282]: DEBUG oslo.service.loopingcall [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.253847] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 994.254045] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67f3e17d-a049-4ca5-8e61-30f9f6dfc580 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.270135] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.270135] env[68282]: value = "task-3470489" [ 994.270135] env[68282]: _type = "Task" [ 994.270135] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.277303] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470489, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.618544] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "0f3637f1-b8e2-47a9-99ca-7f63aa86bf2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.618779] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "0f3637f1-b8e2-47a9-99ca-7f63aa86bf2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.643254] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Acquiring lock "263caec2-5b70-4a83-9567-057ea7526bf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.643476] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "263caec2-5b70-4a83-9567-057ea7526bf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.780690] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470489, 'name': CreateVM_Task, 'duration_secs': 0.244434} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.780856] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 994.781368] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.781528] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.781840] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 994.782121] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56d335ed-d1bd-49b5-bab7-0312364b634b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.786419] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Waiting for the task: (returnval){ [ 994.786419] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]528c374a-3df0-1bd5-0f1c-c6116c969df1" [ 994.786419] env[68282]: _type = "Task" [ 994.786419] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.793927] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]528c374a-3df0-1bd5-0f1c-c6116c969df1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.297241] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 995.297528] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 995.297716] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.417557] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.087793] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1012.101191] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.101388] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.101564] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.101723] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1012.103279] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7648834a-d9c4-4a54-8a2c-2f0d0b6511d0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.112266] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e2fd01-c4d5-4994-9fb1-5e684f25d760 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.127809] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670042a4-aa84-459d-a91a-935e93f7bf05 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.134112] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bef813-75a4-4267-bb18-9584d5b07e4a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.162810] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180904MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1012.162968] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.163219] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.238483] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c2cb0b72-896b-46c6-bb41-90cded35468b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.238648] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25eddb82-c2b3-499f-afe0-5141b4624342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.238780] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.238906] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 650fcdae-dc95-4191-9696-3b6f004bdb62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.239038] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 42977331-21c5-4169-889f-37dfbb10b6ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.239160] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.239278] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.239392] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.239506] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.239616] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1012.250271] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.260725] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b338f0ef-8361-40de-b45b-309cb87a17e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.270965] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.285011] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c4ee8ebc-d940-4ada-b5ec-9a74439d3e87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.296040] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 15d79a66-cec9-4b7c-9680-ad5c125b4cad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.306012] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 994596dc-886c-457e-a634-129e416ce7b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.316362] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9744e475-d8e8-48f7-85e3-c888ab6f25a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.326417] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7be3eafd-b91b-4165-966c-aa056ea1a2dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.337008] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4af66512-25b8-495c-8217-4b99d6db34d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.347113] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8f42c3dc-715f-4e0a-b826-6917a74d85f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.356409] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d18f6406-84a4-42c8-9508-50cf79fff0bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.365743] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 768a616d-caac-48e2-8d6e-efe4c7e544a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.375575] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25a8a015-8e33-4db5-a110-60cccf150165 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.385957] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d1a690ad-5c57-46c7-895f-025c787e5526 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.396609] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c7247022-d5a2-41b4-a378-f056e7429c2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.406936] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0f3637f1-b8e2-47a9-99ca-7f63aa86bf2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.416362] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 263caec2-5b70-4a83-9567-057ea7526bf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.416595] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1012.416742] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1012.744092] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc12790-27de-4968-bd62-e04d1620b8d7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.751633] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf21e242-8741-4112-84a3-307c9d180e5f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.781083] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65d8c0b-689c-42f3-81db-d3148c757b91 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.790273] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f1f77b-f799-41aa-80e7-9c0f89fa4410 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.801874] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.811060] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1012.826464] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1012.826678] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.663s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.826587] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.084149] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.086715] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.086907] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.087157] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.087450] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1018.089033] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.089033] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1018.089033] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1018.108619] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.108789] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.108924] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.109295] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.109473] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.109605] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.109730] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.109854] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.109976] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.110201] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1018.110343] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1018.110831] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.111015] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.106578] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.249204] env[68282]: WARNING oslo_vmware.rw_handles [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1041.249204] env[68282]: ERROR oslo_vmware.rw_handles [ 1041.249204] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 
tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1041.251295] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1041.251548] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Copying Virtual Disk [datastore2] vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/ee05010c-370c-47d5-b412-9e51dee945e6/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1041.251839] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75531b19-22fc-4b98-814d-8b753e439220 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.261609] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Waiting for the task: (returnval){ [ 1041.261609] env[68282]: value = "task-3470490" [ 1041.261609] env[68282]: _type = "Task" [ 1041.261609] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.271273] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Task: {'id': task-3470490, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.777031] env[68282]: DEBUG oslo_vmware.exceptions [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1041.777031] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.777031] env[68282]: ERROR nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1041.777031] env[68282]: Faults: ['InvalidArgument'] [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Traceback (most recent call last): [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] yield resources [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] self.driver.spawn(context, instance, image_meta, [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] self._fetch_image_if_missing(context, vi) [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] image_cache(vi, tmp_image_ds_loc) [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] vm_util.copy_virtual_disk( [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] session._wait_for_task(vmdk_copy_task) [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] return self.wait_for_task(task_ref) [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] return evt.wait() [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] result = hub.switch() [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] return self.greenlet.switch() [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1041.777031] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] self.f(*self.args, **self.kw) [ 1041.778271] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1041.778271] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] raise exceptions.translate_fault(task_info.error) [ 1041.778271] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1041.778271] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Faults: ['InvalidArgument'] [ 1041.778271] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] [ 1041.778271] env[68282]: INFO nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Terminating instance [ 1041.778271] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.778271] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.778271] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1373213e-4227-49f3-a9d6-3efb59212f9b {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.779892] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1041.780104] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1041.780873] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abaf431b-c95e-49f2-9ef4-15a83b6ce147 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.787772] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1041.787966] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-249f66e3-6214-4b40-9cb8-1f56e7b6b002 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.790229] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.790403] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1041.791356] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6806624b-7fb5-4e46-8cd6-66c4e677bcd5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.796048] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Waiting for the task: (returnval){ [ 1041.796048] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52055295-db90-ee5e-48a0-9b090bcaaf02" [ 1041.796048] env[68282]: _type = "Task" [ 1041.796048] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.808831] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52055295-db90-ee5e-48a0-9b090bcaaf02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.854159] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1041.854352] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1041.854539] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Deleting the datastore file [datastore2] c2cb0b72-896b-46c6-bb41-90cded35468b {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1041.854805] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecc1ee42-13a5-419c-b8d4-8cb289c14964 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.860318] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Waiting for the task: (returnval){ [ 1041.860318] env[68282]: value = "task-3470492" [ 1041.860318] env[68282]: _type = "Task" [ 1041.860318] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.867599] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Task: {'id': task-3470492, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.306283] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1042.306579] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Creating directory with path [datastore2] vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1042.306806] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ad53ce1-d766-419a-90a6-61d32a777936 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.318993] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Created directory with path [datastore2] vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1042.319225] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Fetch image to [datastore2] vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1042.319398] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1042.320166] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf8b4f7-4509-4f5c-82e7-d32a61cc0b9f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.326851] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00562276-dfaf-4f68-857c-5df4d05d62b5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.335921] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591740b4-6c7a-4c29-bfb2-5cc73094ee23 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.370737] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63234083-3cde-43ea-88ce-16e73d001e5b {{(pid=68282) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.379882] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-44100c99-c138-4562-bc73-551eee1d79c7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.381586] env[68282]: DEBUG oslo_vmware.api [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Task: {'id': task-3470492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072903} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.381864] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1042.382086] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1042.382341] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1042.382535] env[68282]: INFO nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1042.386787] env[68282]: DEBUG nova.compute.claims [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1042.386964] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.387194] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.404831] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1042.538757] env[68282]: DEBUG oslo_vmware.rw_handles [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1042.602974] env[68282]: DEBUG oslo_vmware.rw_handles [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1042.602974] env[68282]: DEBUG oslo_vmware.rw_handles [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1042.805339] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bd8643-3146-4cb2-b469-726c5f4a18f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.813231] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088ffc4d-3d74-4219-b66f-002a999ed3fd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.843695] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71828dec-325b-4168-a3bb-1581ec471fea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.850761] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fd5df0-8cb0-46ae-ac31-ba7c245dc85f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.864960] env[68282]: DEBUG nova.compute.provider_tree [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.873204] env[68282]: DEBUG nova.scheduler.client.report [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1042.889631] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.502s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.890187] env[68282]: ERROR nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1042.890187] env[68282]: Faults: ['InvalidArgument'] [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Traceback (most recent call last): [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1042.890187] env[68282]: ERROR 
nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] self.driver.spawn(context, instance, image_meta, [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] self._fetch_image_if_missing(context, vi) [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] image_cache(vi, tmp_image_ds_loc) [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] vm_util.copy_virtual_disk( [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] session._wait_for_task(vmdk_copy_task) [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] return self.wait_for_task(task_ref) [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] return evt.wait() [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] result = hub.switch() [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] return self.greenlet.switch() [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] self.f(*self.args, **self.kw) [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] raise exceptions.translate_fault(task_info.error) [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Faults: ['InvalidArgument'] [ 1042.890187] env[68282]: ERROR nova.compute.manager [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] [ 1042.891020] env[68282]: DEBUG nova.compute.utils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1042.892449] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Build of instance c2cb0b72-896b-46c6-bb41-90cded35468b was re-scheduled: A specified parameter was not correct: fileType [ 1042.892449] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1042.892828] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1042.893011] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1042.893180] env[68282]: DEBUG nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1042.893355] env[68282]: DEBUG nova.network.neutron [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1043.205607] env[68282]: DEBUG nova.network.neutron [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.219325] env[68282]: INFO nova.compute.manager [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Took 0.33 seconds to deallocate network for instance. [ 1043.325392] env[68282]: INFO nova.scheduler.client.report [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Deleted allocations for instance c2cb0b72-896b-46c6-bb41-90cded35468b [ 1043.346602] env[68282]: DEBUG oslo_concurrency.lockutils [None req-87f000e2-478a-4931-a717-bb85ce7ed6e1 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "c2cb0b72-896b-46c6-bb41-90cded35468b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 329.316s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.347787] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "c2cb0b72-896b-46c6-bb41-90cded35468b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 131.429s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.348017] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Acquiring lock "c2cb0b72-896b-46c6-bb41-90cded35468b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.348665] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "c2cb0b72-896b-46c6-bb41-90cded35468b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.348665] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "c2cb0b72-896b-46c6-bb41-90cded35468b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.351706] env[68282]: INFO nova.compute.manager [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Terminating instance [ 1043.352718] env[68282]: DEBUG nova.compute.manager [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1043.352863] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1043.353361] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9446ad87-73d4-4339-b077-6c3397d21e66 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.362738] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d79378-be84-49e3-b2da-2fca761898be {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.374189] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1043.394313] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c2cb0b72-896b-46c6-bb41-90cded35468b could not be found. [ 1043.394527] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1043.394712] env[68282]: INFO nova.compute.manager [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1043.394972] env[68282]: DEBUG oslo.service.loopingcall [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1043.395210] env[68282]: DEBUG nova.compute.manager [-] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1043.395309] env[68282]: DEBUG nova.network.neutron [-] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1043.424007] env[68282]: DEBUG nova.network.neutron [-] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.430355] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.430614] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.432084] env[68282]: INFO nova.compute.claims [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1043.435324] env[68282]: INFO nova.compute.manager [-] [instance: c2cb0b72-896b-46c6-bb41-90cded35468b] Took 0.04 seconds to deallocate network for instance. 
[ 1043.556248] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c7147e29-c83f-490b-b3a9-a43c233cfdc8 tempest-ServerDiagnosticsTest-715202704 tempest-ServerDiagnosticsTest-715202704-project-member] Lock "c2cb0b72-896b-46c6-bb41-90cded35468b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.206s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.809494] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dade580-35ed-4696-906f-0dc17a5124fb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.817134] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca660c01-20cd-4beb-be42-9284d94a91c6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.847056] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef74c7c-1d97-4e92-925e-735becf94e6b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.854312] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebff2499-e44c-488b-90cc-7a19b3bb7d4a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.868017] env[68282]: DEBUG nova.compute.provider_tree [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.876125] env[68282]: DEBUG nova.scheduler.client.report [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1043.892077] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.461s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.892584] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1043.924391] env[68282]: DEBUG nova.compute.utils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1043.925830] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1043.926014] env[68282]: DEBUG nova.network.neutron [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1043.937925] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1043.991947] env[68282]: DEBUG nova.policy [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '90ed5bcda1ea450394d5d89aa4b47421', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4476d0cb39524d6ea6e463571c775360', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1044.006869] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1044.035788] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1044.036100] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1044.036291] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.036521] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1044.036690] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.036865] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1044.037114] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1044.037295] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1044.037483] 
env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1044.037675] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1044.037863] env[68282]: DEBUG nova.virt.hardware [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1044.038824] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19ac5b5-ced4-480f-bf7b-8d334c0dce1f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.049481] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b05fe9-913d-45ed-9df4-6f8d73b46d0f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.308832] env[68282]: DEBUG nova.network.neutron [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Successfully created port: d875d300-ebfd-4ef6-9566-3a6a39b6159e {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.182750] env[68282]: DEBUG nova.network.neutron [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Successfully updated port: d875d300-ebfd-4ef6-9566-3a6a39b6159e {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1045.198196] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquiring lock "refresh_cache-66243637-f1f4-4c60-b12a-bbe30c423630" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.198901] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquired lock "refresh_cache-66243637-f1f4-4c60-b12a-bbe30c423630" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.198901] env[68282]: DEBUG nova.network.neutron [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1045.245232] env[68282]: DEBUG 
nova.network.neutron [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1045.416634] env[68282]: DEBUG nova.compute.manager [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Received event network-vif-plugged-d875d300-ebfd-4ef6-9566-3a6a39b6159e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1045.416882] env[68282]: DEBUG oslo_concurrency.lockutils [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] Acquiring lock "66243637-f1f4-4c60-b12a-bbe30c423630-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.417105] env[68282]: DEBUG oslo_concurrency.lockutils [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] Lock "66243637-f1f4-4c60-b12a-bbe30c423630-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.417281] env[68282]: DEBUG oslo_concurrency.lockutils [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] Lock "66243637-f1f4-4c60-b12a-bbe30c423630-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.417449] env[68282]: DEBUG nova.compute.manager [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] No waiting events found dispatching network-vif-plugged-d875d300-ebfd-4ef6-9566-3a6a39b6159e {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1045.417660] env[68282]: WARNING nova.compute.manager [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Received unexpected event network-vif-plugged-d875d300-ebfd-4ef6-9566-3a6a39b6159e for instance with vm_state building and task_state spawning. [ 1045.417772] env[68282]: DEBUG nova.compute.manager [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Received event network-changed-d875d300-ebfd-4ef6-9566-3a6a39b6159e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1045.417996] env[68282]: DEBUG nova.compute.manager [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Refreshing instance network info cache due to event network-changed-d875d300-ebfd-4ef6-9566-3a6a39b6159e. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1045.418137] env[68282]: DEBUG oslo_concurrency.lockutils [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] Acquiring lock "refresh_cache-66243637-f1f4-4c60-b12a-bbe30c423630" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.426802] env[68282]: DEBUG nova.network.neutron [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Updating instance_info_cache with network_info: [{"id": "d875d300-ebfd-4ef6-9566-3a6a39b6159e", "address": "fa:16:3e:84:55:2b", "network": {"id": "3386090a-7f8d-4d16-aa7c-f3f61f0acc1e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1001357851-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4476d0cb39524d6ea6e463571c775360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6046aec4-feda-4ef9-bf4a-800de1e0cd3b", "external-id": "nsx-vlan-transportzone-903", "segmentation_id": 903, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd875d300-eb", "ovs_interfaceid": "d875d300-ebfd-4ef6-9566-3a6a39b6159e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.440481] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Releasing lock "refresh_cache-66243637-f1f4-4c60-b12a-bbe30c423630" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.440746] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Instance network_info: |[{"id": "d875d300-ebfd-4ef6-9566-3a6a39b6159e", "address": "fa:16:3e:84:55:2b", "network": {"id": "3386090a-7f8d-4d16-aa7c-f3f61f0acc1e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1001357851-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4476d0cb39524d6ea6e463571c775360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6046aec4-feda-4ef9-bf4a-800de1e0cd3b", "external-id": "nsx-vlan-transportzone-903", 
"segmentation_id": 903, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd875d300-eb", "ovs_interfaceid": "d875d300-ebfd-4ef6-9566-3a6a39b6159e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1045.441040] env[68282]: DEBUG oslo_concurrency.lockutils [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] Acquired lock "refresh_cache-66243637-f1f4-4c60-b12a-bbe30c423630" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.441224] env[68282]: DEBUG nova.network.neutron [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Refreshing network info cache for port d875d300-ebfd-4ef6-9566-3a6a39b6159e {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1045.442296] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:55:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6046aec4-feda-4ef9-bf4a-800de1e0cd3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd875d300-ebfd-4ef6-9566-3a6a39b6159e', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.449738] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Creating folder: Project (4476d0cb39524d6ea6e463571c775360). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1045.450682] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a559b37-2170-4a43-b3ae-c9be4e1ff71e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.466187] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Created folder: Project (4476d0cb39524d6ea6e463571c775360) in parent group-v693573. [ 1045.466389] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Creating folder: Instances. Parent ref: group-v693624. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1045.466629] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bf80f4e-3402-4775-b5e1-cdbd38a8b58b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.474671] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Created folder: Instances in parent group-v693624. 
[ 1045.474916] env[68282]: DEBUG oslo.service.loopingcall [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.475120] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1045.475309] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff356a72-791f-4035-a5b1-1c94a84d6058 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.495712] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.495712] env[68282]: value = "task-3470495" [ 1045.495712] env[68282]: _type = "Task" [ 1045.495712] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.506572] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470495, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.723747] env[68282]: DEBUG nova.network.neutron [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Updated VIF entry in instance network info cache for port d875d300-ebfd-4ef6-9566-3a6a39b6159e. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1045.724110] env[68282]: DEBUG nova.network.neutron [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Updating instance_info_cache with network_info: [{"id": "d875d300-ebfd-4ef6-9566-3a6a39b6159e", "address": "fa:16:3e:84:55:2b", "network": {"id": "3386090a-7f8d-4d16-aa7c-f3f61f0acc1e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1001357851-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4476d0cb39524d6ea6e463571c775360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6046aec4-feda-4ef9-bf4a-800de1e0cd3b", "external-id": "nsx-vlan-transportzone-903", "segmentation_id": 903, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd875d300-eb", "ovs_interfaceid": "d875d300-ebfd-4ef6-9566-3a6a39b6159e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.734482] env[68282]: DEBUG oslo_concurrency.lockutils [req-12d58aa7-872f-4e90-97ad-18a901685735 req-14724e76-0b52-4d4f-b899-665c1bdf7e3f service nova] Releasing lock "refresh_cache-66243637-f1f4-4c60-b12a-bbe30c423630" {{(pid=68282) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.008173] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470495, 'name': CreateVM_Task, 'duration_secs': 0.292291} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.008423] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1046.009430] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.009627] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.010076] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1046.010439] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11bdb578-2532-47ab-a849-24b3cfac8c25 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.016045] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Waiting for the task: (returnval){ [ 1046.016045] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ef0d92-b138-3eee-f5fb-4c4c621d4ed9" [ 1046.016045] env[68282]: _type = "Task" [ 1046.016045] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.026870] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ef0d92-b138-3eee-f5fb-4c4c621d4ed9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.526209] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.526499] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.526686] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.921190] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquiring lock "66243637-f1f4-4c60-b12a-bbe30c423630" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.088619] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1072.102737] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.102970] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.103167] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.103322] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1072.104536] env[68282]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f57f25-d0fb-49e3-9420-75e2d96a4049 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.114846] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7ea17d-80a9-497c-b2f3-e059cdb52bda {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.129577] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfc8049-75d0-4ace-b13f-02deed31d898 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.136620] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db686447-fa29-4a69-8d48-18015fe70d0c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.165760] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180940MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1072.165911] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.166157] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.247682] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25eddb82-c2b3-499f-afe0-5141b4624342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.247866] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.247997] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 650fcdae-dc95-4191-9696-3b6f004bdb62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
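(Editor's note) The resource tracker lines here report the hypervisor view (free_ram, free_disk, free_vcpus) and, further down, the inventory it pushes to Placement plus the per-instance allocations. A small sketch of that payload shape, with the numeric values copied from this log and the helper name hypothetical:

```python
# Illustrative shape of the Placement inventory reported for this node
# (totals and limits taken from the log entries in this section).
def build_inventory(total_vcpus, total_ram_mb, total_disk_gb):
    return {
        'VCPU': {'total': total_vcpus, 'reserved': 0, 'min_unit': 1,
                 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': total_ram_mb, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': total_disk_gb, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0},
    }

inventory = build_inventory(48, 196590, 400)

# Each actively managed instance in the log holds an allocation like this
# against the compute node's resource provider:
instance_allocation = {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}
```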
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.248135] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 42977331-21c5-4169-889f-37dfbb10b6ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.248258] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.248377] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.248493] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.248615] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.248802] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.248936] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1072.277674] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b338f0ef-8361-40de-b45b-309cb87a17e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.289559] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.301464] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c4ee8ebc-d940-4ada-b5ec-9a74439d3e87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.312094] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 15d79a66-cec9-4b7c-9680-ad5c125b4cad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.321592] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 994596dc-886c-457e-a634-129e416ce7b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.332155] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9744e475-d8e8-48f7-85e3-c888ab6f25a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.341499] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7be3eafd-b91b-4165-966c-aa056ea1a2dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.352031] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4af66512-25b8-495c-8217-4b99d6db34d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.361091] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8f42c3dc-715f-4e0a-b826-6917a74d85f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.370484] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d18f6406-84a4-42c8-9508-50cf79fff0bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.379860] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 768a616d-caac-48e2-8d6e-efe4c7e544a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.389465] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25a8a015-8e33-4db5-a110-60cccf150165 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.398748] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d1a690ad-5c57-46c7-895f-025c787e5526 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.408440] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c7247022-d5a2-41b4-a378-f056e7429c2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.417982] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0f3637f1-b8e2-47a9-99ca-7f63aa86bf2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.427992] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 263caec2-5b70-4a83-9567-057ea7526bf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.428261] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1072.428409] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1072.727803] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279796ce-b2d9-46a4-a9cc-234d510ec0dc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.735036] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea8f323-8e4b-483d-824f-9fdda46dc6c7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.765121] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e18b3f-587f-4b00-8919-d6ffbaf3bd33 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.771980] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d172be-b3d9-4f96-90cd-bcd8ab231c20 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.785652] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.793727] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1072.808165] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1072.808407] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.642s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.807631] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.083365] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.086777] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.086938] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1078.088632] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.088882] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.089267] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.089468] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.089468] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1080.089468] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1080.108302] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Skipping network cache update for instance because it is Building. 
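(Editor's note) The ComputeManager tasks logged here (_poll_rebooting_instances, _reclaim_queued_deletes, _heal_instance_info_cache, and so on) are driven by oslo.service's periodic task machinery. A rough, hedged sketch of how such tasks are declared; the class name, spacing value, and task bodies are illustrative, not Nova's implementation.

```python
# Sketch of oslo.service periodic tasks, assuming a manager class that
# owns them; only the decorator usage mirrors what the log shows.
from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)

    @periodic_task.periodic_task
    def _poll_rebooting_instances(self, context):
        pass  # runs on the default interval

    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        pass  # spacing=60 is an illustrative override

# A timer elsewhere invokes mgr.run_periodic_tasks(context), which emits
# the "Running periodic task ..." lines seen above.
```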
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.108471] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.108606] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.108737] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.108997] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.108997] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.109268] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.109418] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.109544] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.109666] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1080.109786] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1080.110290] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.322259] env[68282]: WARNING oslo_vmware.rw_handles [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1091.322259] env[68282]: ERROR oslo_vmware.rw_handles [ 1091.323054] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1091.324923] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1091.325197] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Copying Virtual Disk [datastore2] vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/63501b99-0341-483c-ab4e-7b701a05bcae/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1091.325538] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08ca5ae1-0fc1-451e-9932-66f091d01fba {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.334092] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Waiting for the task: (returnval){ [ 1091.334092] env[68282]: value = "task-3470496" [ 1091.334092] env[68282]: _type = "Task" [ 1091.334092] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.343105] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Task: {'id': task-3470496, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.844578] env[68282]: DEBUG oslo_vmware.exceptions [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1091.844901] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.845649] env[68282]: ERROR nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1091.845649] env[68282]: Faults: ['InvalidArgument'] [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Traceback (most recent call last): [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] yield resources [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] self.driver.spawn(context, instance, image_meta, [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] self._fetch_image_if_missing(context, 
vi) [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] image_cache(vi, tmp_image_ds_loc) [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] vm_util.copy_virtual_disk( [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] session._wait_for_task(vmdk_copy_task) [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] return self.wait_for_task(task_ref) [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] return evt.wait() [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] result = hub.switch() [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] return self.greenlet.switch() [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] self.f(*self.args, **self.kw) [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] raise exceptions.translate_fault(task_info.error) [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Faults: ['InvalidArgument'] [ 1091.845649] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] [ 1091.846622] env[68282]: INFO nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 
tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Terminating instance [ 1091.847702] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.847915] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.848201] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61e88b7b-33a1-4732-9abf-35758d354ca8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.850353] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.850551] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquired lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.850872] env[68282]: DEBUG nova.network.neutron [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1091.858901] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.859051] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1091.860306] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc8219ed-69db-4eb5-b274-f00128852007 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.869023] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Waiting for the task: (returnval){ [ 1091.869023] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52da7cf4-7728-c523-a4e1-62a7c2cc6d2f" [ 1091.869023] env[68282]: _type = "Task" [ 1091.869023] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.875833] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52da7cf4-7728-c523-a4e1-62a7c2cc6d2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.882454] env[68282]: DEBUG nova.network.neutron [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1091.946423] env[68282]: DEBUG nova.network.neutron [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.957359] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Releasing lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.957928] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Start destroying the instance on the hypervisor. 
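(Editor's note) The recurring "Invoking ..._Task" / "Waiting for the task" / "Task ... completed successfully" sequences follow oslo.vmware's invoke-then-wait pattern. A hedged sketch of that pattern is below; the host, credentials, datastore path, and datacenter reference are placeholders, and only the session calls mirror the library.

```python
# Sketch of the oslo.vmware task pattern, using DeleteDatastoreFile_Task
# as in the log; values are placeholders, not taken from this deployment.
from oslo_vmware import api

session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

dc_ref = None  # placeholder: a Datacenter managed object reference in real use

# Kick off the asynchronous vCenter task, then block until it finishes;
# wait_for_task() is what produces the "progress is 0%" polling lines.
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          session.vim.service_content.fileManager,
                          name='[datastore2] some/instance/dir',
                          datacenter=dc_ref)
session.wait_for_task(task)
```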
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1091.958489] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1091.959240] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f72df8-7df1-4f64-8ce8-2e3faeb968d2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.967288] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1091.967445] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8dbd1014-5a10-4251-8a0c-96641b0e700c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.999365] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1091.999579] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1091.999759] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Deleting the datastore file [datastore2] 650fcdae-dc95-4191-9696-3b6f004bdb62 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.000038] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8dc65d0c-7f22-4219-8895-9ff2ed3ea337 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.007290] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Waiting for the task: (returnval){ [ 1092.007290] env[68282]: value = "task-3470498" [ 1092.007290] env[68282]: _type = "Task" [ 1092.007290] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.014852] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Task: {'id': task-3470498, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.380545] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1092.380545] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Creating directory with path [datastore2] vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.382578] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3ea7b92-de5e-4af9-96a3-ef9c070da49d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.452352] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Created directory with path [datastore2] vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.452568] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Fetch image to [datastore2] vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1092.452814] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1092.453677] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb99c07-4b11-486f-a9f4-214536a46cbc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.461349] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19270362-d858-4ad3-b198-815a106f9c18 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.470586] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5635ffce-4827-4c1c-98b0-af5b77c4124b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.501758] env[68282]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e46325-f7b0-45d2-b190-9ccda81407f6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.507458] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-56103b62-88bc-4995-ae01-1935b7d7993c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.517063] env[68282]: DEBUG oslo_vmware.api [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Task: {'id': task-3470498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04204} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.517294] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.517477] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1092.517670] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1092.517848] env[68282]: INFO nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1092.518106] env[68282]: DEBUG oslo.service.loopingcall [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1092.518323] env[68282]: DEBUG nova.compute.manager [-] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1092.520604] env[68282]: DEBUG nova.compute.claims [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1092.520819] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.521064] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.531429] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1092.587498] env[68282]: DEBUG oslo_vmware.rw_handles [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1092.653299] env[68282]: DEBUG oslo_vmware.rw_handles [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1092.653489] env[68282]: DEBUG oslo_vmware.rw_handles [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1092.933158] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9180071-0374-477b-ae1c-8b2635bc6766 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.941151] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d209ee35-4741-4d74-8830-83a5b55cf42c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.972603] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ab311e-eca4-4a58-bfe8-faa61307ae70 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.980625] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c754fc27-cb8b-4fac-9a39-0f878356d03c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.993780] env[68282]: DEBUG nova.compute.provider_tree [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.002484] env[68282]: DEBUG nova.scheduler.client.report [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1093.023193] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.502s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.023800] env[68282]: ERROR nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1093.023800] env[68282]: Faults: ['InvalidArgument'] [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Traceback (most recent call last): [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1093.023800] env[68282]: ERROR 
nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] self.driver.spawn(context, instance, image_meta, [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] self._fetch_image_if_missing(context, vi) [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] image_cache(vi, tmp_image_ds_loc) [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] vm_util.copy_virtual_disk( [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] session._wait_for_task(vmdk_copy_task) [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] return self.wait_for_task(task_ref) [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] return evt.wait() [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] result = hub.switch() [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] return self.greenlet.switch() [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] self.f(*self.args, **self.kw) [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] raise exceptions.translate_fault(task_info.error) [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Faults: ['InvalidArgument'] [ 1093.023800] env[68282]: ERROR nova.compute.manager [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] [ 1093.024740] env[68282]: DEBUG nova.compute.utils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1093.026241] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Build of instance 650fcdae-dc95-4191-9696-3b6f004bdb62 was re-scheduled: A specified parameter was not correct: fileType [ 1093.026241] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1093.026667] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1093.026912] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.027078] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquired lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.027265] env[68282]: DEBUG nova.network.neutron [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1093.055784] env[68282]: DEBUG nova.network.neutron [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1093.133883] env[68282]: DEBUG nova.network.neutron [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.143150] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Releasing lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.143409] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1093.143820] env[68282]: DEBUG nova.compute.manager [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Skipping network deallocation for instance since networking was not requested. {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1093.250728] env[68282]: INFO nova.scheduler.client.report [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Deleted allocations for instance 650fcdae-dc95-4191-9696-3b6f004bdb62 [ 1093.275463] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c27eb1e9-f1c5-4d03-8714-de4ebd2125f5 tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "650fcdae-dc95-4191-9696-3b6f004bdb62" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 367.653s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.276557] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "650fcdae-dc95-4191-9696-3b6f004bdb62" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 167.772s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.276776] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "650fcdae-dc95-4191-9696-3b6f004bdb62-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.276984] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock 
"650fcdae-dc95-4191-9696-3b6f004bdb62-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.277166] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "650fcdae-dc95-4191-9696-3b6f004bdb62-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.279267] env[68282]: INFO nova.compute.manager [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Terminating instance [ 1093.280885] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquiring lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.281059] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Acquired lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.281237] env[68282]: DEBUG nova.network.neutron [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1093.296047] env[68282]: DEBUG nova.compute.manager [None req-aeae6255-1e7d-4706-959f-86410b020ff6 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: b338f0ef-8361-40de-b45b-309cb87a17e9] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1093.317426] env[68282]: DEBUG nova.network.neutron [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1093.320873] env[68282]: DEBUG nova.compute.manager [None req-aeae6255-1e7d-4706-959f-86410b020ff6 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] [instance: b338f0ef-8361-40de-b45b-309cb87a17e9] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1093.342444] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeae6255-1e7d-4706-959f-86410b020ff6 tempest-MigrationsAdminTest-1164929110 tempest-MigrationsAdminTest-1164929110-project-member] Lock "b338f0ef-8361-40de-b45b-309cb87a17e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.046s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.352453] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1093.408745] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.409067] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.410718] env[68282]: INFO nova.compute.claims [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1093.414255] env[68282]: DEBUG nova.network.neutron [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.427024] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Releasing lock "refresh_cache-650fcdae-dc95-4191-9696-3b6f004bdb62" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1093.427024] env[68282]: DEBUG nova.compute.manager [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1093.427024] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1093.427295] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa109156-4c75-4fdf-b20d-ae8fc987a7d1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.438668] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bc41b0-9983-4188-a455-846f45c41ea2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.474726] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 650fcdae-dc95-4191-9696-3b6f004bdb62 could not be found. [ 1093.474941] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1093.475259] env[68282]: INFO nova.compute.manager [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1093.475522] env[68282]: DEBUG oslo.service.loopingcall [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1093.479458] env[68282]: DEBUG nova.compute.manager [-] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1093.479458] env[68282]: DEBUG nova.network.neutron [-] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1093.496909] env[68282]: DEBUG nova.network.neutron [-] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1093.506873] env[68282]: DEBUG nova.network.neutron [-] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.517166] env[68282]: INFO nova.compute.manager [-] [instance: 650fcdae-dc95-4191-9696-3b6f004bdb62] Took 0.04 seconds to deallocate network for instance. 
[ 1093.629564] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4631693b-a76c-492a-8b7f-3ec201107bec tempest-ServersAaction247Test-311755086 tempest-ServersAaction247Test-311755086-project-member] Lock "650fcdae-dc95-4191-9696-3b6f004bdb62" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.353s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.829188] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25989a6-82ee-4a31-a41f-ce2f06bb3dde {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.837696] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1771cf-4cf7-4b66-b1ff-72ca1c6c840d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.871919] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e658bb9-88ac-412c-91ab-df2204ad63d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.878202] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0472613-a8c4-4e93-9374-0dc2efc16a1d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.892588] env[68282]: DEBUG nova.compute.provider_tree [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.900946] env[68282]: DEBUG nova.scheduler.client.report [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1093.918274] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.509s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.918829] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1093.956992] env[68282]: DEBUG nova.compute.utils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1093.958783] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1093.958910] env[68282]: DEBUG nova.network.neutron [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1093.970840] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1094.034761] env[68282]: DEBUG nova.policy [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7aee8a1e035742e0b67873bfcce2ef72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3d2a3fac4b04f8fa6622043de5e500d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1094.064323] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1094.097329] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1094.097772] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1094.097831] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.098017] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1094.098993] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.099270] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1094.099531] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1094.101118] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1094.101118] 
env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1094.101118] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1094.101118] env[68282]: DEBUG nova.virt.hardware [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1094.102075] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809e8e05-709c-4275-a866-f49fdde64cb6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.113800] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7ff45b-c0de-4b58-abe4-3685ad420441 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.391942] env[68282]: DEBUG nova.network.neutron [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Successfully created port: 9e68460f-b799-45e6-ab01-8f14e4e4be95 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1095.176300] env[68282]: DEBUG nova.compute.manager [req-b2236d51-2428-4ea6-8205-40a1dbc9a483 req-b84263ef-4706-4994-9dfd-75b060ef85b4 service nova] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Received event network-vif-plugged-9e68460f-b799-45e6-ab01-8f14e4e4be95 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1095.176551] env[68282]: DEBUG oslo_concurrency.lockutils [req-b2236d51-2428-4ea6-8205-40a1dbc9a483 req-b84263ef-4706-4994-9dfd-75b060ef85b4 service nova] Acquiring lock "eeb7149b-8d07-4968-9089-d6278c4565e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.176764] env[68282]: DEBUG oslo_concurrency.lockutils [req-b2236d51-2428-4ea6-8205-40a1dbc9a483 req-b84263ef-4706-4994-9dfd-75b060ef85b4 service nova] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.176953] env[68282]: DEBUG oslo_concurrency.lockutils [req-b2236d51-2428-4ea6-8205-40a1dbc9a483 req-b84263ef-4706-4994-9dfd-75b060ef85b4 service nova] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1095.177800] env[68282]: DEBUG nova.compute.manager [req-b2236d51-2428-4ea6-8205-40a1dbc9a483 req-b84263ef-4706-4994-9dfd-75b060ef85b4 service nova] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] No waiting events found dispatching network-vif-plugged-9e68460f-b799-45e6-ab01-8f14e4e4be95 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1095.177800] env[68282]: WARNING nova.compute.manager [req-b2236d51-2428-4ea6-8205-40a1dbc9a483 req-b84263ef-4706-4994-9dfd-75b060ef85b4 service nova] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Received unexpected event network-vif-plugged-9e68460f-b799-45e6-ab01-8f14e4e4be95 for instance with vm_state building and task_state spawning. [ 1095.240545] env[68282]: DEBUG nova.network.neutron [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Successfully updated port: 9e68460f-b799-45e6-ab01-8f14e4e4be95 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1095.261442] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "refresh_cache-eeb7149b-8d07-4968-9089-d6278c4565e5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.261596] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "refresh_cache-eeb7149b-8d07-4968-9089-d6278c4565e5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.261747] env[68282]: DEBUG nova.network.neutron [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1095.310453] env[68282]: DEBUG nova.network.neutron [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1095.495025] env[68282]: DEBUG nova.network.neutron [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Updating instance_info_cache with network_info: [{"id": "9e68460f-b799-45e6-ab01-8f14e4e4be95", "address": "fa:16:3e:b3:10:26", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e68460f-b7", "ovs_interfaceid": "9e68460f-b799-45e6-ab01-8f14e4e4be95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.508590] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "refresh_cache-eeb7149b-8d07-4968-9089-d6278c4565e5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.509181] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Instance network_info: |[{"id": "9e68460f-b799-45e6-ab01-8f14e4e4be95", "address": "fa:16:3e:b3:10:26", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e68460f-b7", "ovs_interfaceid": "9e68460f-b799-45e6-ab01-8f14e4e4be95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1095.509446] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:10:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b911797-478d-4ee5-bce9-6f2f49014e94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e68460f-b799-45e6-ab01-8f14e4e4be95', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1095.521167] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating folder: Project (e3d2a3fac4b04f8fa6622043de5e500d). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1095.521167] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80be1eb8-98f5-4903-906c-7369667d5926 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.534376] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created folder: Project (e3d2a3fac4b04f8fa6622043de5e500d) in parent group-v693573. [ 1095.534376] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating folder: Instances. Parent ref: group-v693627. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1095.534376] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48748c6e-f459-4054-8ecc-0f3da9d8fbbe {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.542297] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created folder: Instances in parent group-v693627. [ 1095.542730] env[68282]: DEBUG oslo.service.loopingcall [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1095.543072] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1095.543430] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8151724b-e5f6-4631-9111-e102b075aa6d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.563629] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1095.563629] env[68282]: value = "task-3470501" [ 1095.563629] env[68282]: _type = "Task" [ 1095.563629] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.571556] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470501, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.073952] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470501, 'name': CreateVM_Task, 'duration_secs': 0.280869} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.074227] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1096.074918] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.075112] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.075467] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1096.075720] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85932f78-8e72-4823-a36a-dc1fd4495821 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.080255] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 1096.080255] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52353104-86cf-7fe8-a380-523cbadaf969" [ 1096.080255] env[68282]: _type = "Task" [ 1096.080255] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.088678] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52353104-86cf-7fe8-a380-523cbadaf969, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.591610] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.591972] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1096.592244] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.367968] env[68282]: DEBUG nova.compute.manager [req-228de663-5c63-4319-b4ff-c90cfe318af8 req-ee3a5663-0b7f-43a3-ac0e-9d6036a28401 service nova] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Received event network-changed-9e68460f-b799-45e6-ab01-8f14e4e4be95 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1097.368106] env[68282]: DEBUG nova.compute.manager [req-228de663-5c63-4319-b4ff-c90cfe318af8 req-ee3a5663-0b7f-43a3-ac0e-9d6036a28401 service nova] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Refreshing instance network info cache due to event network-changed-9e68460f-b799-45e6-ab01-8f14e4e4be95. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1097.368261] env[68282]: DEBUG oslo_concurrency.lockutils [req-228de663-5c63-4319-b4ff-c90cfe318af8 req-ee3a5663-0b7f-43a3-ac0e-9d6036a28401 service nova] Acquiring lock "refresh_cache-eeb7149b-8d07-4968-9089-d6278c4565e5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.368444] env[68282]: DEBUG oslo_concurrency.lockutils [req-228de663-5c63-4319-b4ff-c90cfe318af8 req-ee3a5663-0b7f-43a3-ac0e-9d6036a28401 service nova] Acquired lock "refresh_cache-eeb7149b-8d07-4968-9089-d6278c4565e5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.368590] env[68282]: DEBUG nova.network.neutron [req-228de663-5c63-4319-b4ff-c90cfe318af8 req-ee3a5663-0b7f-43a3-ac0e-9d6036a28401 service nova] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Refreshing network info cache for port 9e68460f-b799-45e6-ab01-8f14e4e4be95 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1097.709488] env[68282]: DEBUG nova.network.neutron [req-228de663-5c63-4319-b4ff-c90cfe318af8 req-ee3a5663-0b7f-43a3-ac0e-9d6036a28401 service nova] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Updated VIF entry in instance network info cache for port 9e68460f-b799-45e6-ab01-8f14e4e4be95. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1097.709848] env[68282]: DEBUG nova.network.neutron [req-228de663-5c63-4319-b4ff-c90cfe318af8 req-ee3a5663-0b7f-43a3-ac0e-9d6036a28401 service nova] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Updating instance_info_cache with network_info: [{"id": "9e68460f-b799-45e6-ab01-8f14e4e4be95", "address": "fa:16:3e:b3:10:26", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e68460f-b7", "ovs_interfaceid": "9e68460f-b799-45e6-ab01-8f14e4e4be95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.722363] env[68282]: DEBUG oslo_concurrency.lockutils [req-228de663-5c63-4319-b4ff-c90cfe318af8 req-ee3a5663-0b7f-43a3-ac0e-9d6036a28401 service nova] Releasing lock "refresh_cache-eeb7149b-8d07-4968-9089-d6278c4565e5" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.152404] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 
tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.152654] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.187308] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.187630] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.214148] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.214323] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.820747] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "6680219f-25bf-453c-ba97-4aeb3295f62b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.821022] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "6680219f-25bf-453c-ba97-4aeb3295f62b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.087745] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.105258] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.105527] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.105708] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.105897] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1132.107144] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60233794-68ed-47c0-8249-53d1759cb11f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.120262] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744569ce-3375-47ad-8f8a-9caeaa616f2f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.143760] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc86e54-3f61-4c56-a32a-8c23e295acd2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.155128] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a2113e-95be-47b9-af32-5d961d855385 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.190450] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180940MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1132.190633] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.190883] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.295185] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25eddb82-c2b3-499f-afe0-5141b4624342 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.295476] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.295669] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 42977331-21c5-4169-889f-37dfbb10b6ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.295844] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.296124] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.296227] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.296412] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.296601] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.296781] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.296970] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1132.314885] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c4ee8ebc-d940-4ada-b5ec-9a74439d3e87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.333929] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 15d79a66-cec9-4b7c-9680-ad5c125b4cad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.349093] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 994596dc-886c-457e-a634-129e416ce7b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.365924] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9744e475-d8e8-48f7-85e3-c888ab6f25a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.383350] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7be3eafd-b91b-4165-966c-aa056ea1a2dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.402032] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4af66512-25b8-495c-8217-4b99d6db34d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.415315] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8f42c3dc-715f-4e0a-b826-6917a74d85f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.432745] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d18f6406-84a4-42c8-9508-50cf79fff0bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.448039] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 768a616d-caac-48e2-8d6e-efe4c7e544a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.465831] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 25a8a015-8e33-4db5-a110-60cccf150165 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.478225] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d1a690ad-5c57-46c7-895f-025c787e5526 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.489595] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c7247022-d5a2-41b4-a378-f056e7429c2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.501647] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0f3637f1-b8e2-47a9-99ca-7f63aa86bf2d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.514786] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 263caec2-5b70-4a83-9567-057ea7526bf9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.527714] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.541436] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.557687] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.572450] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1132.572697] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1132.572848] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1133.131251] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ecf3760-057d-4297-8d78-d60dd8323b2a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.139677] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b294f3-5823-41c3-951c-1df2271479cd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.173140] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c50814-4a90-46dc-9c64-9d424e7d6823 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.182024] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13625d5a-a2b0-47c3-bbda-e564341abed9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.198342] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.220175] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1133.241502] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1133.241502] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.050s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.541460] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-3c9f7227-8ef4-4bf9-8ffd-079555ee3f9d tempest-ImagesOneServerNegativeTestJSON-1930787394 tempest-ImagesOneServerNegativeTestJSON-1930787394-project-member] Acquiring lock "e06eb2ba-c8a8-41b8-89b8-2afb94abe501" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.542563] env[68282]: DEBUG oslo_concurrency.lockutils [None req-3c9f7227-8ef4-4bf9-8ffd-079555ee3f9d tempest-ImagesOneServerNegativeTestJSON-1930787394 tempest-ImagesOneServerNegativeTestJSON-1930787394-project-member] Lock "e06eb2ba-c8a8-41b8-89b8-2afb94abe501" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.762765] env[68282]: DEBUG oslo_concurrency.lockutils [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "eeb7149b-8d07-4968-9089-d6278c4565e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.570764] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5cf49791-98e3-4e5e-8a18-024b14007d33 tempest-ServersTestJSON-1523968152 tempest-ServersTestJSON-1523968152-project-member] Acquiring lock "52507e06-547b-4fbe-8689-cf497332c7de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.570764] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5cf49791-98e3-4e5e-8a18-024b14007d33 tempest-ServersTestJSON-1523968152 tempest-ServersTestJSON-1523968152-project-member] Lock "52507e06-547b-4fbe-8689-cf497332c7de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.243959] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1138.088059] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.083376] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.086948] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.087216] env[68282]: DEBUG nova.compute.manager [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1139.451764] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c4b81251-101c-49a8-9fea-3fa8ce7e899a tempest-ServerActionsTestJSON-800717835 tempest-ServerActionsTestJSON-800717835-project-member] Acquiring lock "8b6e3667-a6d8-4840-a849-7f4e26f93767" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.451764] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c4b81251-101c-49a8-9fea-3fa8ce7e899a tempest-ServerActionsTestJSON-800717835 tempest-ServerActionsTestJSON-800717835-project-member] Lock "8b6e3667-a6d8-4840-a849-7f4e26f93767" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.083355] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.112240] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.112771] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.113155] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.089197] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.089197] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1141.089197] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1141.115126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115669] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115669] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1141.115669] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1141.338027] env[68282]: WARNING oslo_vmware.rw_handles [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1141.338027] env[68282]: ERROR oslo_vmware.rw_handles [ 1141.338502] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1141.340169] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1141.340448] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Copying Virtual Disk [datastore2] vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/9758f2be-4569-419b-90ab-d8bd4f196851/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1141.340701] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03404e69-a7a9-4c6a-ad9a-2f4925834100 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.349866] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 
tempest-AttachInterfacesUnderV243Test-870864962-project-member] Waiting for the task: (returnval){ [ 1141.349866] env[68282]: value = "task-3470506" [ 1141.349866] env[68282]: _type = "Task" [ 1141.349866] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.357397] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Task: {'id': task-3470506, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.860917] env[68282]: DEBUG oslo_vmware.exceptions [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1141.861234] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.861805] env[68282]: ERROR nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1141.861805] env[68282]: Faults: ['InvalidArgument'] [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Traceback (most recent call last): [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] yield resources [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] self.driver.spawn(context, instance, image_meta, [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] self._fetch_image_if_missing(context, vi) [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] image_cache(vi, tmp_image_ds_loc) [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] vm_util.copy_virtual_disk( [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] session._wait_for_task(vmdk_copy_task) [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] return self.wait_for_task(task_ref) [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] return evt.wait() [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] result = hub.switch() [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] return self.greenlet.switch() [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] self.f(*self.args, **self.kw) [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] raise exceptions.translate_fault(task_info.error) [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Faults: ['InvalidArgument'] [ 1141.861805] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] [ 1141.863320] env[68282]: INFO nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 
25eddb82-c2b3-499f-afe0-5141b4624342] Terminating instance [ 1141.863971] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.864300] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1141.864862] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1141.865068] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1141.865309] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acfa107d-f0f1-46cf-b5ab-5ded39028b21 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.867668] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fff8f63-fc44-4f25-827a-b47a0202ca38 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.875456] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1141.875671] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a48aa393-b8fb-4e16-ba0d-e0b2536bcc39 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.877970] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1141.878179] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1141.879125] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13299677-762e-4e87-bdaf-8e43a254218c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.883858] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Waiting for the task: (returnval){ [ 1141.883858] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52f81251-e53b-31b0-030d-104187aaceff" [ 1141.883858] env[68282]: _type = "Task" [ 1141.883858] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.891660] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52f81251-e53b-31b0-030d-104187aaceff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.944186] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1141.945480] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1141.945685] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Deleting the datastore file [datastore2] 25eddb82-c2b3-499f-afe0-5141b4624342 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.945965] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90b60d98-f66a-45a3-b28d-18a1526a937e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.952766] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Waiting for the task: (returnval){ [ 1141.952766] env[68282]: value = "task-3470508" [ 1141.952766] env[68282]: _type = "Task" [ 1141.952766] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.961270] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Task: {'id': task-3470508, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.394302] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1142.394660] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Creating directory with path [datastore2] vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1142.394822] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc515214-2c4b-4350-bc84-afb756095751 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.405590] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Created directory with path [datastore2] vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.405844] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Fetch image to [datastore2] vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1142.406036] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1142.406833] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce952ecc-4f9a-4973-99f6-c334df25168a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.417554] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8a1fcf-5122-4707-abc0-552398ad7a6b {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.427557] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882e1983-222f-4771-9670-8f6970e0ba6e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.467267] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44108e8-8846-4a9d-b655-c9835e319d6c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.475441] env[68282]: DEBUG oslo_vmware.api [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Task: {'id': task-3470508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069869} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.477110] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.477465] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1142.477770] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1142.478094] env[68282]: INFO nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1142.480642] env[68282]: DEBUG nova.compute.claims [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1142.483400] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.483400] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.484047] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c09f18c9-f577-43ba-ba43-48281da3e654 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.507193] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1142.569684] env[68282]: DEBUG oslo_vmware.rw_handles [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1142.630714] env[68282]: DEBUG oslo_vmware.rw_handles [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1142.630994] env[68282]: DEBUG oslo_vmware.rw_handles [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1142.982076] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6057b51-770b-4fc5-b928-ead762a60011 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.990908] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc14c2f0-2d84-4ffd-9a34-474798dd6db6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.024633] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454706a6-e560-4ed2-af9d-5125247ab65f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.032609] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025af662-bf9b-4e9a-88dd-ce347ff9b129 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.045504] env[68282]: DEBUG nova.compute.provider_tree [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1143.054340] env[68282]: DEBUG nova.scheduler.client.report [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1143.079860] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.598s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.079930] env[68282]: ERROR nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1143.079930] env[68282]: Faults: ['InvalidArgument'] [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Traceback (most recent call last): [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] self.driver.spawn(context, instance, image_meta, [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] self._fetch_image_if_missing(context, vi) [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] image_cache(vi, tmp_image_ds_loc) [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] vm_util.copy_virtual_disk( [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] session._wait_for_task(vmdk_copy_task) [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] return self.wait_for_task(task_ref) [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] return evt.wait() [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] result = hub.switch() [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] return self.greenlet.switch() [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] self.f(*self.args, **self.kw) [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 
25eddb82-c2b3-499f-afe0-5141b4624342] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] raise exceptions.translate_fault(task_info.error) [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Faults: ['InvalidArgument'] [ 1143.079930] env[68282]: ERROR nova.compute.manager [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] [ 1143.080696] env[68282]: DEBUG nova.compute.utils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1143.082358] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Build of instance 25eddb82-c2b3-499f-afe0-5141b4624342 was re-scheduled: A specified parameter was not correct: fileType [ 1143.082358] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1143.082775] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1143.082981] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1143.083195] env[68282]: DEBUG nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1143.083389] env[68282]: DEBUG nova.network.neutron [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1143.270520] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.270761] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.596928] env[68282]: DEBUG nova.network.neutron [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.609623] env[68282]: INFO nova.compute.manager [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Took 0.53 seconds to deallocate network for instance. 
[ 1143.701406] env[68282]: INFO nova.scheduler.client.report [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Deleted allocations for instance 25eddb82-c2b3-499f-afe0-5141b4624342 [ 1143.725640] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fa2ee510-b10d-4585-b7cd-5bd38d9d834e tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "25eddb82-c2b3-499f-afe0-5141b4624342" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 422.727s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.727094] env[68282]: DEBUG oslo_concurrency.lockutils [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "25eddb82-c2b3-499f-afe0-5141b4624342" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 225.488s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.727399] env[68282]: DEBUG oslo_concurrency.lockutils [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Acquiring lock "25eddb82-c2b3-499f-afe0-5141b4624342-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.727666] env[68282]: DEBUG oslo_concurrency.lockutils [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "25eddb82-c2b3-499f-afe0-5141b4624342-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.727897] env[68282]: DEBUG oslo_concurrency.lockutils [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "25eddb82-c2b3-499f-afe0-5141b4624342-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.729973] env[68282]: INFO nova.compute.manager [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Terminating instance [ 1143.732064] env[68282]: DEBUG nova.compute.manager [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1143.732389] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1143.732973] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1c51131-8688-4cc8-b93f-310944aacf69 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.742255] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8cedbe-d580-4b3d-9546-2595e6bb6626 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.753220] env[68282]: DEBUG nova.compute.manager [None req-0ad470c5-fed7-4161-9939-69e2b2328fea tempest-ServersTestFqdnHostnames-663221872 tempest-ServersTestFqdnHostnames-663221872-project-member] [instance: c4ee8ebc-d940-4ada-b5ec-9a74439d3e87] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1143.776670] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 25eddb82-c2b3-499f-afe0-5141b4624342 could not be found. [ 1143.777616] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1143.777616] env[68282]: INFO nova.compute.manager [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1143.777616] env[68282]: DEBUG oslo.service.loopingcall [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1143.777756] env[68282]: DEBUG nova.compute.manager [-] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1143.777877] env[68282]: DEBUG nova.network.neutron [-] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1143.782940] env[68282]: DEBUG nova.compute.manager [None req-0ad470c5-fed7-4161-9939-69e2b2328fea tempest-ServersTestFqdnHostnames-663221872 tempest-ServersTestFqdnHostnames-663221872-project-member] [instance: c4ee8ebc-d940-4ada-b5ec-9a74439d3e87] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1143.803741] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0ad470c5-fed7-4161-9939-69e2b2328fea tempest-ServersTestFqdnHostnames-663221872 tempest-ServersTestFqdnHostnames-663221872-project-member] Lock "c4ee8ebc-d940-4ada-b5ec-9a74439d3e87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.470s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.805467] env[68282]: DEBUG nova.network.neutron [-] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.813952] env[68282]: INFO nova.compute.manager [-] [instance: 25eddb82-c2b3-499f-afe0-5141b4624342] Took 0.04 seconds to deallocate network for instance. [ 1143.816166] env[68282]: DEBUG nova.compute.manager [None req-202c3962-c0eb-4c00-a639-7350fb3a83f2 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 15d79a66-cec9-4b7c-9680-ad5c125b4cad] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1143.842294] env[68282]: DEBUG nova.compute.manager [None req-202c3962-c0eb-4c00-a639-7350fb3a83f2 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 15d79a66-cec9-4b7c-9680-ad5c125b4cad] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1143.864777] env[68282]: DEBUG oslo_concurrency.lockutils [None req-202c3962-c0eb-4c00-a639-7350fb3a83f2 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "15d79a66-cec9-4b7c-9680-ad5c125b4cad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.227s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.875770] env[68282]: DEBUG nova.compute.manager [None req-daa0feb7-bdcc-4451-83e8-3d326684ec92 tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] [instance: 994596dc-886c-457e-a634-129e416ce7b8] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1143.902192] env[68282]: DEBUG nova.compute.manager [None req-daa0feb7-bdcc-4451-83e8-3d326684ec92 tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] [instance: 994596dc-886c-457e-a634-129e416ce7b8] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1143.913016] env[68282]: DEBUG oslo_concurrency.lockutils [None req-09d623c7-9464-4dad-9528-348b2b93ad97 tempest-AttachInterfacesUnderV243Test-870864962 tempest-AttachInterfacesUnderV243Test-870864962-project-member] Lock "25eddb82-c2b3-499f-afe0-5141b4624342" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.931053] env[68282]: DEBUG oslo_concurrency.lockutils [None req-daa0feb7-bdcc-4451-83e8-3d326684ec92 tempest-VolumesAdminNegativeTest-2113147513 tempest-VolumesAdminNegativeTest-2113147513-project-member] Lock "994596dc-886c-457e-a634-129e416ce7b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.917s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.941220] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1143.991510] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.991510] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.992409] env[68282]: INFO nova.compute.claims [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.419131] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be31f43e-dbb1-464e-a0e7-e74333d64d9f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.427624] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57739cae-6117-40ea-a785-a862c6333eae {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.458195] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a404db29-d08e-48eb-b1e7-91bd9ecfe952 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.468389] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bf0ce0-d5fb-4c4f-9f2b-c99b344f9841 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.480860] env[68282]: DEBUG nova.compute.provider_tree [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.496112] env[68282]: DEBUG nova.scheduler.client.report [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1144.509536] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.519s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.510073] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1144.552666] env[68282]: DEBUG nova.compute.utils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1144.556032] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Allocating IP information in the background. 
{{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1144.556032] env[68282]: DEBUG nova.network.neutron [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1144.567290] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1144.641291] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1144.649604] env[68282]: DEBUG nova.policy [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66da14ac2f9844d4a4dea14186c98997', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '002ed1a2c0ff4f559e70e1798afb0dd4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1144.668427] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1144.668724] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1144.668924] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 
tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1144.669171] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1144.669365] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1144.669550] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1144.669806] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1144.670024] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1144.670233] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1144.670442] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1144.670667] env[68282]: DEBUG nova.virt.hardware [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1144.671663] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475c3234-ce14-441b-b1c2-f49b80e95216 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.680369] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0845ae9c-7624-4187-ac63-74c457e2cabb {{(pid=68282) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.219048] env[68282]: DEBUG nova.network.neutron [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Successfully created port: ec86b6b5-8208-48df-b43b-cc8130e6ca0e {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1145.627399] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.849562] env[68282]: DEBUG nova.compute.manager [req-5bf036d5-0c93-4281-af2d-3e47208f7277 req-0bc9f768-52d5-4d4d-8b73-2c7cec2ae37a service nova] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Received event network-vif-plugged-ec86b6b5-8208-48df-b43b-cc8130e6ca0e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1145.849862] env[68282]: DEBUG oslo_concurrency.lockutils [req-5bf036d5-0c93-4281-af2d-3e47208f7277 req-0bc9f768-52d5-4d4d-8b73-2c7cec2ae37a service nova] Acquiring lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.849987] env[68282]: DEBUG oslo_concurrency.lockutils [req-5bf036d5-0c93-4281-af2d-3e47208f7277 req-0bc9f768-52d5-4d4d-8b73-2c7cec2ae37a service nova] Lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.850171] env[68282]: DEBUG oslo_concurrency.lockutils [req-5bf036d5-0c93-4281-af2d-3e47208f7277 req-0bc9f768-52d5-4d4d-8b73-2c7cec2ae37a service nova] Lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.850341] env[68282]: DEBUG nova.compute.manager [req-5bf036d5-0c93-4281-af2d-3e47208f7277 req-0bc9f768-52d5-4d4d-8b73-2c7cec2ae37a service nova] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] No waiting events found dispatching network-vif-plugged-ec86b6b5-8208-48df-b43b-cc8130e6ca0e {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1145.850506] env[68282]: WARNING nova.compute.manager [req-5bf036d5-0c93-4281-af2d-3e47208f7277 req-0bc9f768-52d5-4d4d-8b73-2c7cec2ae37a service nova] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Received unexpected event network-vif-plugged-ec86b6b5-8208-48df-b43b-cc8130e6ca0e for instance with vm_state building and task_state deleting. 
[ 1145.936448] env[68282]: DEBUG nova.network.neutron [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Successfully updated port: ec86b6b5-8208-48df-b43b-cc8130e6ca0e {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1145.949618] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1145.949681] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquired lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1145.950042] env[68282]: DEBUG nova.network.neutron [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1146.183665] env[68282]: DEBUG nova.network.neutron [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1146.474210] env[68282]: DEBUG nova.network.neutron [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Updating instance_info_cache with network_info: [{"id": "ec86b6b5-8208-48df-b43b-cc8130e6ca0e", "address": "fa:16:3e:e3:12:ac", "network": {"id": "2cf8ce84-dde3-4b32-b947-7b62e8cfa3bd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-982264616-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "002ed1a2c0ff4f559e70e1798afb0dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec86b6b5-82", "ovs_interfaceid": "ec86b6b5-8208-48df-b43b-cc8130e6ca0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.490917] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Releasing lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1146.490917] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance network_info: |[{"id": "ec86b6b5-8208-48df-b43b-cc8130e6ca0e", "address": "fa:16:3e:e3:12:ac", "network": {"id": "2cf8ce84-dde3-4b32-b947-7b62e8cfa3bd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-982264616-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "002ed1a2c0ff4f559e70e1798afb0dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec86b6b5-82", "ovs_interfaceid": "ec86b6b5-8208-48df-b43b-cc8130e6ca0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1146.490917] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:12:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec86b6b5-8208-48df-b43b-cc8130e6ca0e', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1146.501186] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Creating folder: Project (002ed1a2c0ff4f559e70e1798afb0dd4). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1146.501186] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa10b788-ea96-4fc8-89f9-d86b08e76bbf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.512208] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Created folder: Project (002ed1a2c0ff4f559e70e1798afb0dd4) in parent group-v693573. [ 1146.512449] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Creating folder: Instances. Parent ref: group-v693633. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1146.512697] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8daaf4ad-efd7-439f-ac6b-1e313c2fbe34 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.527140] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Created folder: Instances in parent group-v693633. [ 1146.527413] env[68282]: DEBUG oslo.service.loopingcall [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1146.527610] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1146.527815] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ab821b8-9061-46f4-94a7-d74502a3905d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.563066] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1146.563066] env[68282]: value = "task-3470513" [ 1146.563066] env[68282]: _type = "Task" [ 1146.563066] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.573417] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470513, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.075306] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470513, 'name': CreateVM_Task, 'duration_secs': 0.309849} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.076420] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1147.077194] env[68282]: DEBUG oslo_vmware.service [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4251af6-41cf-44fc-8740-68f5529eed18 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.089327] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.089327] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.089327] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1147.089327] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a4acf8b-3140-4b33-8da1-cd7a68f39f9b {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.095113] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Waiting for the task: (returnval){ [ 1147.095113] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52eca165-1a8a-7a78-8b27-90f7ede57cc4" [ 1147.095113] env[68282]: _type = "Task" [ 1147.095113] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.107233] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52eca165-1a8a-7a78-8b27-90f7ede57cc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.611300] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1147.611300] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1147.611300] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1147.611300] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.611300] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1147.611300] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7180debe-76ad-4177-9e0f-b8dc8d788aa6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.619635] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1147.619832] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1147.622662] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd29763-1902-44fe-8630-d2fe9b35a850 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.630958] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d3e01f5-946e-464e-a9bd-79f2f7ad46f5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.638995] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Waiting for the task: (returnval){ [ 1147.638995] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]526a00d4-eaeb-c2ed-026c-965026cd653c" [ 1147.638995] env[68282]: _type = "Task" [ 1147.638995] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.647370] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]526a00d4-eaeb-c2ed-026c-965026cd653c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.091019] env[68282]: DEBUG nova.compute.manager [req-dea529c4-79a8-44f9-abb0-763883b90f6d req-d6134122-6f4e-4c02-ad55-57ca66a79d8e service nova] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Received event network-changed-ec86b6b5-8208-48df-b43b-cc8130e6ca0e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1148.091555] env[68282]: DEBUG nova.compute.manager [req-dea529c4-79a8-44f9-abb0-763883b90f6d req-d6134122-6f4e-4c02-ad55-57ca66a79d8e service nova] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Refreshing instance network info cache due to event network-changed-ec86b6b5-8208-48df-b43b-cc8130e6ca0e. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1148.091555] env[68282]: DEBUG oslo_concurrency.lockutils [req-dea529c4-79a8-44f9-abb0-763883b90f6d req-d6134122-6f4e-4c02-ad55-57ca66a79d8e service nova] Acquiring lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1148.091866] env[68282]: DEBUG oslo_concurrency.lockutils [req-dea529c4-79a8-44f9-abb0-763883b90f6d req-d6134122-6f4e-4c02-ad55-57ca66a79d8e service nova] Acquired lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.091866] env[68282]: DEBUG nova.network.neutron [req-dea529c4-79a8-44f9-abb0-763883b90f6d req-d6134122-6f4e-4c02-ad55-57ca66a79d8e service nova] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Refreshing network info cache for port ec86b6b5-8208-48df-b43b-cc8130e6ca0e {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1148.156013] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1148.156343] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Creating directory with path [datastore1] vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1148.156634] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3d85af6-869f-48b8-8fef-1384e6ea3182 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.187064] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Created directory with path [datastore1] vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1148.187064] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Fetch image to [datastore1] vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1148.187064] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore1] vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data 
store datastore1 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1148.187064] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec87903-e809-424b-912f-5e36a484c138 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.195126] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96b866a-7a28-430f-86d4-b5e4470cb23c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.210010] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5a8999-3148-4c0d-a37a-85dd7a32601d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.252007] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23e28f2-3835-415e-b389-7ecccfbdc8fb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.259425] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f6d40612-9ceb-43d3-8b46-69e6fa814cc5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.283885] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore1 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1148.340810] env[68282]: DEBUG oslo_vmware.rw_handles [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1148.408584] env[68282]: DEBUG oslo_vmware.rw_handles [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1148.408855] env[68282]: DEBUG oslo_vmware.rw_handles [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1148.864987] env[68282]: DEBUG nova.network.neutron [req-dea529c4-79a8-44f9-abb0-763883b90f6d req-d6134122-6f4e-4c02-ad55-57ca66a79d8e service nova] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Updated VIF entry in instance network info cache for port ec86b6b5-8208-48df-b43b-cc8130e6ca0e. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1148.865967] env[68282]: DEBUG nova.network.neutron [req-dea529c4-79a8-44f9-abb0-763883b90f6d req-d6134122-6f4e-4c02-ad55-57ca66a79d8e service nova] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Updating instance_info_cache with network_info: [{"id": "ec86b6b5-8208-48df-b43b-cc8130e6ca0e", "address": "fa:16:3e:e3:12:ac", "network": {"id": "2cf8ce84-dde3-4b32-b947-7b62e8cfa3bd", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-982264616-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "002ed1a2c0ff4f559e70e1798afb0dd4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec86b6b5-82", "ovs_interfaceid": "ec86b6b5-8208-48df-b43b-cc8130e6ca0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.886095] env[68282]: DEBUG oslo_concurrency.lockutils [req-dea529c4-79a8-44f9-abb0-763883b90f6d req-d6134122-6f4e-4c02-ad55-57ca66a79d8e service nova] Releasing lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.464201] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquiring lock "4340e67d-0b82-4f16-8c49-88886a57523f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.464613] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "4340e67d-0b82-4f16-8c49-88886a57523f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.904385] env[68282]: DEBUG oslo_concurrency.lockutils [None req-975a55a9-f0f9-4f7e-8dd9-64feba3818f3 tempest-InstanceActionsTestJSON-1920713201 tempest-InstanceActionsTestJSON-1920713201-project-member] Acquiring lock 
"e86e97e0-c191-4f39-9e71-1e99dfbbe65f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.904385] env[68282]: DEBUG oslo_concurrency.lockutils [None req-975a55a9-f0f9-4f7e-8dd9-64feba3818f3 tempest-InstanceActionsTestJSON-1920713201 tempest-InstanceActionsTestJSON-1920713201-project-member] Lock "e86e97e0-c191-4f39-9e71-1e99dfbbe65f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.387979] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9db6281d-c268-4088-b5b6-72adf5b7758f tempest-ServerActionsV293TestJSON-1576633023 tempest-ServerActionsV293TestJSON-1576633023-project-member] Acquiring lock "340f09fe-88a8-4b55-bf56-771d9fe1a14a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.387979] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9db6281d-c268-4088-b5b6-72adf5b7758f tempest-ServerActionsV293TestJSON-1576633023 tempest-ServerActionsV293TestJSON-1576633023-project-member] Lock "340f09fe-88a8-4b55-bf56-771d9fe1a14a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.538360] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e5f4dc5a-51be-4a09-ac0b-326da94af3a0 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] Acquiring lock "70943b53-2303-4cf8-8e02-95cbf011454c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.538666] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e5f4dc5a-51be-4a09-ac0b-326da94af3a0 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] Lock "70943b53-2303-4cf8-8e02-95cbf011454c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.548455] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c1751f96-f1e9-452f-bcac-a0b5f67ab02b tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Acquiring lock "da3a24eb-1e73-4137-b54f-da8077968d78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.548455] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c1751f96-f1e9-452f-bcac-a0b5f67ab02b tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Lock "da3a24eb-1e73-4137-b54f-da8077968d78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.095075] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0c01a4cc-23cb-4e12-9af5-11e8da39ab24 tempest-ServerTagsTestJSON-396054325 tempest-ServerTagsTestJSON-396054325-project-member] Acquiring lock "d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.095495] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0c01a4cc-23cb-4e12-9af5-11e8da39ab24 tempest-ServerTagsTestJSON-396054325 tempest-ServerTagsTestJSON-396054325-project-member] Lock "d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.061959] env[68282]: DEBUG oslo_concurrency.lockutils [None req-19de0116-ef92-437e-ba8b-3ed8543086d3 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "f1e39c16-6bf4-4b22-9bac-b82d176df7f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.062254] env[68282]: DEBUG oslo_concurrency.lockutils [None req-19de0116-ef92-437e-ba8b-3ed8543086d3 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "f1e39c16-6bf4-4b22-9bac-b82d176df7f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.825997] env[68282]: WARNING oslo_vmware.rw_handles [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1190.825997] env[68282]: ERROR oslo_vmware.rw_handles [ 1190.826522] env[68282]: DEBUG nova.virt.vmwareapi.images [None 
req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1190.828339] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1190.828583] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Copying Virtual Disk [datastore2] vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/3af61196-031f-461e-a1df-0d5a963ebfb9/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1190.828862] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97c9a766-241a-45c8-80aa-a54f0236be90 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.837786] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Waiting for the task: (returnval){ [ 1190.837786] env[68282]: value = "task-3470518" [ 1190.837786] env[68282]: _type = "Task" [ 1190.837786] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.845981] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Task: {'id': task-3470518, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.348650] env[68282]: DEBUG oslo_vmware.exceptions [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1191.349019] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.349531] env[68282]: ERROR nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1191.349531] env[68282]: Faults: ['InvalidArgument'] [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Traceback (most recent call last): [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] yield resources [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] self.driver.spawn(context, instance, image_meta, [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] self._fetch_image_if_missing(context, vi) [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] image_cache(vi, tmp_image_ds_loc) [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] vm_util.copy_virtual_disk( [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] session._wait_for_task(vmdk_copy_task) [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] return self.wait_for_task(task_ref) [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] return evt.wait() [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] result = hub.switch() [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] return self.greenlet.switch() [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] self.f(*self.args, **self.kw) [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] raise exceptions.translate_fault(task_info.error) [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Faults: ['InvalidArgument'] [ 1191.349531] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] [ 1191.350409] env[68282]: INFO nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Terminating instance [ 1191.351602] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.351836] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1191.352470] env[68282]: DEBUG nova.compute.manager [None 
req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1191.352663] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1191.352900] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db912787-a10d-4e3e-a2e5-8e9c2cc34e46 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.355341] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaad0471-a8b2-4d2c-915b-3267d6e8751c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.362264] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1191.362504] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc9836df-dda6-4c15-a7eb-05d77226984e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.364797] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1191.364969] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1191.365981] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f526af79-a4a4-4f61-b3a7-424ca26f8fde {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.370935] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Waiting for the task: (returnval){ [ 1191.370935] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5287460f-1c03-0ad6-9ee1-a5fcf9da9964" [ 1191.370935] env[68282]: _type = "Task" [ 1191.370935] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.378176] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5287460f-1c03-0ad6-9ee1-a5fcf9da9964, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.426105] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1191.426228] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1191.426420] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Deleting the datastore file [datastore2] 97cb5db2-5a4a-4a17-afde-3af1c15ae733 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1191.426698] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e732f6b-f776-4aa7-9779-167c0f848c78 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.432615] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Waiting for the task: (returnval){ [ 1191.432615] env[68282]: value = "task-3470520" [ 1191.432615] env[68282]: _type = "Task" [ 1191.432615] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.440881] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Task: {'id': task-3470520, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.881716] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1191.881996] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Creating directory with path [datastore2] vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1191.882258] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b18db72c-1307-47b5-a943-6ae971d94a9d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.894550] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Created directory with path [datastore2] vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1191.894550] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Fetch image to [datastore2] vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1191.894550] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1191.895044] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4df20c-cead-4a48-9938-cd0a1946050e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.901908] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158f1c41-c1b4-4810-9bb2-9eaeeb8c89ba {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.912494] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75541ec7-7e32-443e-b904-86214e39e0c7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1191.947856] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53180b03-edd1-4cbb-bb53-877b5b20a945 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.954936] env[68282]: DEBUG oslo_vmware.api [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Task: {'id': task-3470520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076554} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.956471] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1191.956706] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1191.956894] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1191.957082] env[68282]: INFO nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1191.958875] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bab3eeec-c3b3-4f62-8359-8f59181f8f45 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.960833] env[68282]: DEBUG nova.compute.claims [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1191.961219] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.961482] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1191.984806] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1192.054707] env[68282]: DEBUG oslo_vmware.rw_handles [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1192.129023] env[68282]: DEBUG oslo_vmware.rw_handles [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1192.129023] env[68282]: DEBUG oslo_vmware.rw_handles [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1192.405365] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3c41a7-8367-4c73-b029-148e0349e643 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.412827] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac35cf96-d60a-4332-b8c7-ce040219bcc2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.443942] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec203b0-76ce-4a5e-a5a8-20cfeadfce18 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.451827] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd16b660-c135-4dc2-932c-4d44a6cbb2a7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.464986] env[68282]: DEBUG nova.compute.provider_tree [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1192.487333] env[68282]: DEBUG nova.scheduler.client.report [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1192.504051] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.541s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.504051] env[68282]: ERROR nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1192.504051] env[68282]: Faults: ['InvalidArgument'] [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Traceback (most recent call last): [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File 
"/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] self.driver.spawn(context, instance, image_meta, [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] self._fetch_image_if_missing(context, vi) [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] image_cache(vi, tmp_image_ds_loc) [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] vm_util.copy_virtual_disk( [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] session._wait_for_task(vmdk_copy_task) [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] return self.wait_for_task(task_ref) [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] return evt.wait() [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] result = hub.switch() [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] return self.greenlet.switch() [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] self.f(*self.args, **self.kw) [ 1192.504051] 
env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] raise exceptions.translate_fault(task_info.error) [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Faults: ['InvalidArgument'] [ 1192.504051] env[68282]: ERROR nova.compute.manager [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] [ 1192.504899] env[68282]: DEBUG nova.compute.utils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1192.506101] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Build of instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 was re-scheduled: A specified parameter was not correct: fileType [ 1192.506101] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1192.506492] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1192.506678] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1192.506860] env[68282]: DEBUG nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1192.507039] env[68282]: DEBUG nova.network.neutron [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1193.016288] env[68282]: DEBUG nova.network.neutron [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.038839] env[68282]: INFO nova.compute.manager [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Took 0.53 seconds to deallocate network for instance. [ 1193.169641] env[68282]: INFO nova.scheduler.client.report [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Deleted allocations for instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 [ 1193.191647] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5095264c-0a1c-4f93-8bbc-15e3dc60292b tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 467.993s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.192741] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 266.857s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.193017] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Acquiring lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.193286] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 
tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.193516] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.195506] env[68282]: INFO nova.compute.manager [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Terminating instance [ 1193.197458] env[68282]: DEBUG nova.compute.manager [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1193.197655] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1193.198145] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6f4e799-51dc-4ee9-8a1f-62ab275998f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.207150] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90784ef-6ae8-4ed7-83be-162e8ea694cb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.217906] env[68282]: DEBUG nova.compute.manager [None req-cf94b487-547a-4ad9-b72c-329a4962d7a4 tempest-InstanceActionsNegativeTestJSON-1875884134 tempest-InstanceActionsNegativeTestJSON-1875884134-project-member] [instance: 7be3eafd-b91b-4165-966c-aa056ea1a2dc] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.240020] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 97cb5db2-5a4a-4a17-afde-3af1c15ae733 could not be found. 
[ 1193.240020] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1193.240020] env[68282]: INFO nova.compute.manager [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1193.240020] env[68282]: DEBUG oslo.service.loopingcall [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1193.240020] env[68282]: DEBUG nova.compute.manager [-] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1193.240020] env[68282]: DEBUG nova.network.neutron [-] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1193.246660] env[68282]: DEBUG nova.compute.manager [None req-cf94b487-547a-4ad9-b72c-329a4962d7a4 tempest-InstanceActionsNegativeTestJSON-1875884134 tempest-InstanceActionsNegativeTestJSON-1875884134-project-member] [instance: 7be3eafd-b91b-4165-966c-aa056ea1a2dc] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.266205] env[68282]: DEBUG nova.network.neutron [-] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.276315] env[68282]: DEBUG oslo_concurrency.lockutils [None req-cf94b487-547a-4ad9-b72c-329a4962d7a4 tempest-InstanceActionsNegativeTestJSON-1875884134 tempest-InstanceActionsNegativeTestJSON-1875884134-project-member] Lock "7be3eafd-b91b-4165-966c-aa056ea1a2dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.718s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.278538] env[68282]: INFO nova.compute.manager [-] [instance: 97cb5db2-5a4a-4a17-afde-3af1c15ae733] Took 0.04 seconds to deallocate network for instance. [ 1193.293125] env[68282]: DEBUG nova.compute.manager [None req-03f11d3c-001a-4051-9ca9-51911c825938 tempest-ServersAdminNegativeTestJSON-813769931 tempest-ServersAdminNegativeTestJSON-813769931-project-member] [instance: 4af66512-25b8-495c-8217-4b99d6db34d4] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.322086] env[68282]: DEBUG nova.compute.manager [None req-03f11d3c-001a-4051-9ca9-51911c825938 tempest-ServersAdminNegativeTestJSON-813769931 tempest-ServersAdminNegativeTestJSON-813769931-project-member] [instance: 4af66512-25b8-495c-8217-4b99d6db34d4] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.349884] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03f11d3c-001a-4051-9ca9-51911c825938 tempest-ServersAdminNegativeTestJSON-813769931 tempest-ServersAdminNegativeTestJSON-813769931-project-member] Lock "4af66512-25b8-495c-8217-4b99d6db34d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.754s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.360850] env[68282]: DEBUG nova.compute.manager [None req-76f65f3f-b2b7-4c64-b8ca-72c5fd727571 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] [instance: 8f42c3dc-715f-4e0a-b826-6917a74d85f3] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.384288] env[68282]: DEBUG nova.compute.manager [None req-76f65f3f-b2b7-4c64-b8ca-72c5fd727571 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] [instance: 8f42c3dc-715f-4e0a-b826-6917a74d85f3] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.390362] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1e14d258-d3c9-43d5-9122-ab99bafc1663 tempest-ServersWithSpecificFlavorTestJSON-1225864341 tempest-ServersWithSpecificFlavorTestJSON-1225864341-project-member] Lock "97cb5db2-5a4a-4a17-afde-3af1c15ae733" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.406778] env[68282]: DEBUG oslo_concurrency.lockutils [None req-76f65f3f-b2b7-4c64-b8ca-72c5fd727571 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] Lock "8f42c3dc-715f-4e0a-b826-6917a74d85f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.589s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.418647] env[68282]: DEBUG nova.compute.manager [None req-a86f7592-9d39-4fb1-a7fb-3c82ac644ec3 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] [instance: d18f6406-84a4-42c8-9508-50cf79fff0bf] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.443366] env[68282]: DEBUG nova.compute.manager [None req-a86f7592-9d39-4fb1-a7fb-3c82ac644ec3 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] [instance: d18f6406-84a4-42c8-9508-50cf79fff0bf] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.466755] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a86f7592-9d39-4fb1-a7fb-3c82ac644ec3 tempest-ServersAdminTestJSON-1192554081 tempest-ServersAdminTestJSON-1192554081-project-member] Lock "d18f6406-84a4-42c8-9508-50cf79fff0bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.512s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.476156] env[68282]: DEBUG nova.compute.manager [None req-0073bd9f-39ce-4189-aa5e-650146d564e3 tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] [instance: 768a616d-caac-48e2-8d6e-efe4c7e544a9] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.502412] env[68282]: DEBUG nova.compute.manager [None req-0073bd9f-39ce-4189-aa5e-650146d564e3 tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] [instance: 768a616d-caac-48e2-8d6e-efe4c7e544a9] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.525607] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0073bd9f-39ce-4189-aa5e-650146d564e3 tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Lock "768a616d-caac-48e2-8d6e-efe4c7e544a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.972s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.537024] env[68282]: DEBUG nova.compute.manager [None req-dbbf8853-44cf-48c7-b201-8c76e7cfa3a7 tempest-ServerRescueTestJSON-1990750847 tempest-ServerRescueTestJSON-1990750847-project-member] [instance: 25a8a015-8e33-4db5-a110-60cccf150165] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.563200] env[68282]: DEBUG nova.compute.manager [None req-dbbf8853-44cf-48c7-b201-8c76e7cfa3a7 tempest-ServerRescueTestJSON-1990750847 tempest-ServerRescueTestJSON-1990750847-project-member] [instance: 25a8a015-8e33-4db5-a110-60cccf150165] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.588936] env[68282]: DEBUG oslo_concurrency.lockutils [None req-dbbf8853-44cf-48c7-b201-8c76e7cfa3a7 tempest-ServerRescueTestJSON-1990750847 tempest-ServerRescueTestJSON-1990750847-project-member] Lock "25a8a015-8e33-4db5-a110-60cccf150165" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.773s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.603435] env[68282]: DEBUG nova.compute.manager [None req-68b797c5-d5fc-4c68-a2bc-59bf4d6ad392 tempest-ServersTestManualDisk-1673873711 tempest-ServersTestManualDisk-1673873711-project-member] [instance: d1a690ad-5c57-46c7-895f-025c787e5526] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.635143] env[68282]: DEBUG nova.compute.manager [None req-68b797c5-d5fc-4c68-a2bc-59bf4d6ad392 tempest-ServersTestManualDisk-1673873711 tempest-ServersTestManualDisk-1673873711-project-member] [instance: d1a690ad-5c57-46c7-895f-025c787e5526] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.657281] env[68282]: DEBUG oslo_concurrency.lockutils [None req-68b797c5-d5fc-4c68-a2bc-59bf4d6ad392 tempest-ServersTestManualDisk-1673873711 tempest-ServersTestManualDisk-1673873711-project-member] Lock "d1a690ad-5c57-46c7-895f-025c787e5526" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.238s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.667050] env[68282]: DEBUG nova.compute.manager [None req-55de8ccd-88a4-4406-afd4-1714b8bf6599 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] [instance: c7247022-d5a2-41b4-a378-f056e7429c2e] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.695813] env[68282]: DEBUG nova.compute.manager [None req-55de8ccd-88a4-4406-afd4-1714b8bf6599 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] [instance: c7247022-d5a2-41b4-a378-f056e7429c2e] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.720322] env[68282]: DEBUG oslo_concurrency.lockutils [None req-55de8ccd-88a4-4406-afd4-1714b8bf6599 tempest-SecurityGroupsTestJSON-1872895422 tempest-SecurityGroupsTestJSON-1872895422-project-member] Lock "c7247022-d5a2-41b4-a378-f056e7429c2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.975s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.731117] env[68282]: DEBUG nova.compute.manager [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 0f3637f1-b8e2-47a9-99ca-7f63aa86bf2d] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.754478] env[68282]: DEBUG nova.compute.manager [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 0f3637f1-b8e2-47a9-99ca-7f63aa86bf2d] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.778715] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "0f3637f1-b8e2-47a9-99ca-7f63aa86bf2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.160s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.787859] env[68282]: DEBUG nova.compute.manager [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 263caec2-5b70-4a83-9567-057ea7526bf9] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.818157] env[68282]: DEBUG nova.compute.manager [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] [instance: 263caec2-5b70-4a83-9567-057ea7526bf9] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1193.840524] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4ea8dc49-6900-4f6e-93aa-ba5b2e1f5f82 tempest-MultipleCreateTestJSON-1170863195 tempest-MultipleCreateTestJSON-1170863195-project-member] Lock "263caec2-5b70-4a83-9567-057ea7526bf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.197s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.852537] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1193.911176] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.911447] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.912991] env[68282]: INFO nova.compute.claims [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.087681] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.105652] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.276323] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9227b4-7a6c-4f86-9f86-d867a4cdc904 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.284194] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93434a88-d752-4bf8-974e-c863ee625dfe {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.314403] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036fb064-e2f7-4b23-abc9-0184e9c07ec0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.321750] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5d4cc8-8f45-419a-a202-cd769ff79a58 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.335692] env[68282]: DEBUG nova.compute.provider_tree [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.344597] env[68282]: DEBUG nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 
tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1194.361152] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.450s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.361678] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1194.363970] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.259s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.364218] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.364465] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1194.366053] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084e115c-1bf7-42e6-b16f-334adb0c7b98 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.373971] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f19410-11a8-4585-a262-afdb29ebab7a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.387451] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015fed2e-a693-4d74-a73e-09deb07a0483 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.393655] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f34067-5427-4d7f-bf29-0a9109ad21cd {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.398181] env[68282]: DEBUG nova.compute.utils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1194.399784] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1194.399884] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1194.426758] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180865MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1194.427045] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.427125] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.429572] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Start building block device mappings for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1194.501691] env[68282]: DEBUG nova.policy [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '143a98c51c9c40ebbff887a693684682', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a237c9e184054632bfe56e082bcc95cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1194.504768] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1194.507887] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 42977331-21c5-4169-889f-37dfbb10b6ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.507887] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.507887] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.507887] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.508501] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.508501] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.508501] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.508501] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.508501] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9744e475-d8e8-48f7-85e3-c888ab6f25a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.508819] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.526273] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.539287] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.543920] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1194.544175] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1194.544357] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.544548] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1194.544700] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.544848] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1194.545070] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1194.545244] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1194.545623] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1194.545623] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1194.545777] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1194.546836] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63a303b-b1a1-4df5-b278-46ce9503a957 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.550630] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.557732] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3ac503-516c-447c-bf60-8fa7c876fbdb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.562953] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e06eb2ba-c8a8-41b8-89b8-2afb94abe501 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.575073] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 52507e06-547b-4fbe-8689-cf497332c7de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.586264] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8b6e3667-a6d8-4840-a849-7f4e26f93767 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.616616] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.629604] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.640583] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e86e97e0-c191-4f39-9e71-1e99dfbbe65f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.653918] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 340f09fe-88a8-4b55-bf56-771d9fe1a14a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.666354] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 70943b53-2303-4cf8-8e02-95cbf011454c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.676028] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance da3a24eb-1e73-4137-b54f-da8077968d78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.687176] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.697427] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance f1e39c16-6bf4-4b22-9bac-b82d176df7f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.697679] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1194.697829] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1194.868139] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Successfully created port: ccf1db2d-0c66-4431-a259-1408cbfbe05a {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1195.096263] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c732c68-6459-4686-8b6b-ea795f95f676 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.107464] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1d0a83-b961-41d9-8fb8-481910ba4a29 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.150395] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadaf2ca-5454-41ac-b2ed-abe615cb993e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.159393] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83db5783-70fd-45ac-bc55-978ce68e0367 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.178414] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.187489] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1195.204633] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1195.204810] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.778s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.391942] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "121db530-a9de-4bb9-9d5a-0a88d9587881" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.392051] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "121db530-a9de-4bb9-9d5a-0a88d9587881" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.552125] env[68282]: DEBUG nova.compute.manager [req-3c3b8df4-b2cd-4a5d-8f72-d2672c0082f8 req-a875c93a-0c02-4df6-a03c-f263414d2575 service nova] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Received event network-vif-plugged-ccf1db2d-0c66-4431-a259-1408cbfbe05a {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1195.552384] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c3b8df4-b2cd-4a5d-8f72-d2672c0082f8 req-a875c93a-0c02-4df6-a03c-f263414d2575 service nova] Acquiring lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.552556] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c3b8df4-b2cd-4a5d-8f72-d2672c0082f8 req-a875c93a-0c02-4df6-a03c-f263414d2575 service nova] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.552724] env[68282]: DEBUG oslo_concurrency.lockutils [req-3c3b8df4-b2cd-4a5d-8f72-d2672c0082f8 req-a875c93a-0c02-4df6-a03c-f263414d2575 service nova] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.552883] env[68282]: DEBUG nova.compute.manager [req-3c3b8df4-b2cd-4a5d-8f72-d2672c0082f8 
req-a875c93a-0c02-4df6-a03c-f263414d2575 service nova] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] No waiting events found dispatching network-vif-plugged-ccf1db2d-0c66-4431-a259-1408cbfbe05a {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1195.553054] env[68282]: WARNING nova.compute.manager [req-3c3b8df4-b2cd-4a5d-8f72-d2672c0082f8 req-a875c93a-0c02-4df6-a03c-f263414d2575 service nova] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Received unexpected event network-vif-plugged-ccf1db2d-0c66-4431-a259-1408cbfbe05a for instance with vm_state building and task_state spawning. [ 1195.599750] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Successfully updated port: ccf1db2d-0c66-4431-a259-1408cbfbe05a {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1195.610661] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "refresh_cache-a1676f73-3871-4f59-8440-3ccb27a9a7b9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.610661] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "refresh_cache-a1676f73-3871-4f59-8440-3ccb27a9a7b9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.610661] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1195.659683] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1195.864294] env[68282]: WARNING oslo_vmware.rw_handles [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1195.864294] env[68282]: ERROR oslo_vmware.rw_handles [ 1195.864726] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore1 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1195.865952] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1195.866211] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Copying Virtual Disk [datastore1] vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore1] vmware_temp/d9572002-77d6-4418-8c22-99176be99105/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1195.866528] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e17bdf2-25f5-471f-928b-bd47056a8958 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.869891] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 
tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Updating instance_info_cache with network_info: [{"id": "ccf1db2d-0c66-4431-a259-1408cbfbe05a", "address": "fa:16:3e:5c:bb:5b", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf1db2d-0c", "ovs_interfaceid": "ccf1db2d-0c66-4431-a259-1408cbfbe05a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.875935] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Waiting for the task: (returnval){ [ 1195.875935] env[68282]: value = "task-3470521" [ 1195.875935] env[68282]: _type = "Task" [ 1195.875935] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.884823] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Task: {'id': task-3470521, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.887849] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "refresh_cache-a1676f73-3871-4f59-8440-3ccb27a9a7b9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1195.888152] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Instance network_info: |[{"id": "ccf1db2d-0c66-4431-a259-1408cbfbe05a", "address": "fa:16:3e:5c:bb:5b", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf1db2d-0c", "ovs_interfaceid": "ccf1db2d-0c66-4431-a259-1408cbfbe05a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1195.888573] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:bb:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d413776-9a8c-4afd-856f-10dbb062ca95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccf1db2d-0c66-4431-a259-1408cbfbe05a', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1195.895970] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating folder: Project (a237c9e184054632bfe56e082bcc95cb). Parent ref: group-v693573. 
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1195.896524] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-522aabb7-9279-450d-a4b4-5746affe5cba {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.904972] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Created folder: Project (a237c9e184054632bfe56e082bcc95cb) in parent group-v693573. [ 1195.905206] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating folder: Instances. Parent ref: group-v693637. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1195.905445] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63497cfb-406d-481e-b435-cea8ba86fb0d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.914303] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Created folder: Instances in parent group-v693637. [ 1195.914561] env[68282]: DEBUG oslo.service.loopingcall [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1195.914752] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1195.914952] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad3c7887-a763-4716-9157-640c716e6edd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.932689] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1195.932933] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1195.936560] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1195.936560] env[68282]: value = "task-3470524" [ 1195.936560] env[68282]: _type = "Task" [ 1195.936560] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.944211] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470524, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.385915] env[68282]: DEBUG oslo_vmware.exceptions [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1196.386309] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.386749] env[68282]: ERROR nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1196.386749] env[68282]: Faults: ['InvalidArgument'] [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Traceback (most recent call last): [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] yield resources [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] self.driver.spawn(context, instance, image_meta, [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] self._fetch_image_if_missing(context, vi) [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] image_cache(vi, tmp_image_ds_loc) [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 
1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] vm_util.copy_virtual_disk( [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] session._wait_for_task(vmdk_copy_task) [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] return self.wait_for_task(task_ref) [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] return evt.wait() [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] result = hub.switch() [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] return self.greenlet.switch() [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] self.f(*self.args, **self.kw) [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] raise exceptions.translate_fault(task_info.error) [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Faults: ['InvalidArgument'] [ 1196.386749] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] [ 1196.387884] env[68282]: INFO nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Terminating instance [ 1196.390104] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1196.390306] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1196.391067] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53a3621-f887-4c50-84d5-57b78ad56c44 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.400871] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1196.401121] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58bf3a61-8ea8-4b04-b65a-6d7d05a91d24 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.449588] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470524, 'name': CreateVM_Task, 'duration_secs': 0.334952} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.449690] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1196.450570] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.450570] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.450866] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1196.451327] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4b07c9f-1ea5-4506-870b-004e4a203855 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.456045] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 
tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1196.456045] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5241c9f8-7176-29c1-a57d-835412395688" [ 1196.456045] env[68282]: _type = "Task" [ 1196.456045] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.460291] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1196.460521] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Deleting contents of the VM from datastore datastore1 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1196.460740] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Deleting the datastore file [datastore1] 9744e475-d8e8-48f7-85e3-c888ab6f25a4 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.461407] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a8eab69-512d-4430-8e8e-65499b13f0e8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.466227] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5241c9f8-7176-29c1-a57d-835412395688, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.470116] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Waiting for the task: (returnval){ [ 1196.470116] env[68282]: value = "task-3470526" [ 1196.470116] env[68282]: _type = "Task" [ 1196.470116] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.477059] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Task: {'id': task-3470526, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.967228] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.967631] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1196.967711] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.979051] env[68282]: DEBUG oslo_vmware.api [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Task: {'id': task-3470526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074762} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.979307] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.979492] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Deleted contents of the VM from datastore datastore1 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1196.979668] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1196.979841] env[68282]: INFO nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1196.981974] env[68282]: DEBUG nova.compute.claims [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1196.982170] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.982386] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.478327] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eac8427-2745-4357-b5cc-5d498bf9c2ea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.486430] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a9a708-6027-4caf-8853-30cb92b407f2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.516720] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3538c78-08c9-4361-9f26-6fdc0848c148 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.523977] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d168d35-66dd-421b-b35c-f33c14bb6efa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.538356] env[68282]: DEBUG nova.compute.provider_tree [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.551246] env[68282]: DEBUG nova.scheduler.client.report [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1197.569630] env[68282]: DEBUG oslo_concurrency.lockutils 
[None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.587s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.570189] env[68282]: ERROR nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1197.570189] env[68282]: Faults: ['InvalidArgument'] [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Traceback (most recent call last): [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] self.driver.spawn(context, instance, image_meta, [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] self._fetch_image_if_missing(context, vi) [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] image_cache(vi, tmp_image_ds_loc) [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] vm_util.copy_virtual_disk( [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] session._wait_for_task(vmdk_copy_task) [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] return self.wait_for_task(task_ref) [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1197.570189] env[68282]: ERROR nova.compute.manager 
[instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] return evt.wait() [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] result = hub.switch() [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] return self.greenlet.switch() [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] self.f(*self.args, **self.kw) [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] raise exceptions.translate_fault(task_info.error) [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Faults: ['InvalidArgument'] [ 1197.570189] env[68282]: ERROR nova.compute.manager [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] [ 1197.571029] env[68282]: DEBUG nova.compute.utils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1197.574404] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Build of instance 9744e475-d8e8-48f7-85e3-c888ab6f25a4 was re-scheduled: A specified parameter was not correct: fileType [ 1197.574404] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1197.574888] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1197.575087] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1197.575312] env[68282]: DEBUG nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1197.575502] env[68282]: DEBUG nova.network.neutron [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1197.579936] env[68282]: DEBUG nova.compute.manager [req-1ccf07c8-5e38-4d58-87ba-839b60dcd274 req-a265bf7f-b552-4c76-bb82-172986fb6352 service nova] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Received event network-changed-ccf1db2d-0c66-4431-a259-1408cbfbe05a {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1197.580154] env[68282]: DEBUG nova.compute.manager [req-1ccf07c8-5e38-4d58-87ba-839b60dcd274 req-a265bf7f-b552-4c76-bb82-172986fb6352 service nova] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Refreshing instance network info cache due to event network-changed-ccf1db2d-0c66-4431-a259-1408cbfbe05a. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1197.580374] env[68282]: DEBUG oslo_concurrency.lockutils [req-1ccf07c8-5e38-4d58-87ba-839b60dcd274 req-a265bf7f-b552-4c76-bb82-172986fb6352 service nova] Acquiring lock "refresh_cache-a1676f73-3871-4f59-8440-3ccb27a9a7b9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1197.580520] env[68282]: DEBUG oslo_concurrency.lockutils [req-1ccf07c8-5e38-4d58-87ba-839b60dcd274 req-a265bf7f-b552-4c76-bb82-172986fb6352 service nova] Acquired lock "refresh_cache-a1676f73-3871-4f59-8440-3ccb27a9a7b9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.580690] env[68282]: DEBUG nova.network.neutron [req-1ccf07c8-5e38-4d58-87ba-839b60dcd274 req-a265bf7f-b552-4c76-bb82-172986fb6352 service nova] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Refreshing network info cache for port ccf1db2d-0c66-4431-a259-1408cbfbe05a {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1198.079620] env[68282]: DEBUG nova.network.neutron [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.100783] env[68282]: INFO nova.compute.manager [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Took 0.53 seconds to deallocate network for instance. 
[ 1198.199245] env[68282]: DEBUG nova.network.neutron [req-1ccf07c8-5e38-4d58-87ba-839b60dcd274 req-a265bf7f-b552-4c76-bb82-172986fb6352 service nova] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Updated VIF entry in instance network info cache for port ccf1db2d-0c66-4431-a259-1408cbfbe05a. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1198.199605] env[68282]: DEBUG nova.network.neutron [req-1ccf07c8-5e38-4d58-87ba-839b60dcd274 req-a265bf7f-b552-4c76-bb82-172986fb6352 service nova] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Updating instance_info_cache with network_info: [{"id": "ccf1db2d-0c66-4431-a259-1408cbfbe05a", "address": "fa:16:3e:5c:bb:5b", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf1db2d-0c", "ovs_interfaceid": "ccf1db2d-0c66-4431-a259-1408cbfbe05a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.213104] env[68282]: DEBUG oslo_concurrency.lockutils [req-1ccf07c8-5e38-4d58-87ba-839b60dcd274 req-a265bf7f-b552-4c76-bb82-172986fb6352 service nova] Releasing lock "refresh_cache-a1676f73-3871-4f59-8440-3ccb27a9a7b9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.232923] env[68282]: INFO nova.scheduler.client.report [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Deleted allocations for instance 9744e475-d8e8-48f7-85e3-c888ab6f25a4 [ 1198.252969] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2a3ca640-0d4a-4e8f-ad69-43870cb37198 tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 248.780s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.254178] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 52.627s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.254421] env[68282]: DEBUG 
oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.254710] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.254897] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.258481] env[68282]: INFO nova.compute.manager [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Terminating instance [ 1198.260854] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquiring lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1198.261029] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Acquired lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.261201] env[68282]: DEBUG nova.network.neutron [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1198.275173] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1198.292086] env[68282]: DEBUG nova.network.neutron [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1198.350031] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.350484] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.351857] env[68282]: INFO nova.compute.claims [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1198.605768] env[68282]: DEBUG nova.network.neutron [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.618892] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Releasing lock "refresh_cache-9744e475-d8e8-48f7-85e3-c888ab6f25a4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1198.619487] env[68282]: DEBUG nova.compute.manager [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1198.619984] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1198.620607] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8e251c6-9d52-4718-ad97-1f7d9a85b3cc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.634299] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86db987d-3dd2-4632-a383-3c3756856ed7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.666694] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9744e475-d8e8-48f7-85e3-c888ab6f25a4 could not be found. [ 1198.666925] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1198.667168] env[68282]: INFO nova.compute.manager [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1198.667458] env[68282]: DEBUG oslo.service.loopingcall [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1198.670138] env[68282]: DEBUG nova.compute.manager [-] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1198.670366] env[68282]: DEBUG nova.network.neutron [-] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1198.698754] env[68282]: DEBUG nova.network.neutron [-] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1198.709783] env[68282]: DEBUG nova.network.neutron [-] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.719414] env[68282]: INFO nova.compute.manager [-] [instance: 9744e475-d8e8-48f7-85e3-c888ab6f25a4] Took 0.05 seconds to deallocate network for instance. [ 1198.833851] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ff6f4ce4-1364-4727-80f6-e41d88741c3c tempest-ServerAddressesNegativeTestJSON-582277251 tempest-ServerAddressesNegativeTestJSON-582277251-project-member] Lock "9744e475-d8e8-48f7-85e3-c888ab6f25a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.580s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.858837] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f2866f-e0da-4a9a-b79b-c9f76bfb84e8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.871031] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90daf5b6-3e87-443c-ba45-06742dc09e55 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.908202] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a65f13d-5616-4caa-91dd-f6d1e6490372 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.915742] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c8adac-4214-445c-a8b3-382939e1fce1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.930524] env[68282]: DEBUG nova.compute.provider_tree [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.945450] env[68282]: DEBUG nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1198.965466] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: 
held 0.614s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.965466] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1199.002963] env[68282]: DEBUG nova.compute.utils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1199.004731] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1199.004731] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1199.023715] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1199.112656] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1199.146900] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1199.147177] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1199.147339] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1199.147526] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1199.147677] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1199.147827] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1199.148049] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1199.148218] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1199.148388] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1199.148555] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1199.148725] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1199.149647] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1629b582-9505-4202-8523-26d3e4ace5e3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.161683] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78a645f-396b-4a2f-9621-289a967ed394 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.177130] env[68282]: DEBUG nova.policy [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '143a98c51c9c40ebbff887a693684682', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a237c9e184054632bfe56e082bcc95cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1199.204728] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.205009] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.205188] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.205343] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1200.149116] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Successfully created port: bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1201.083637] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.088235] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.088235] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1201.088235] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1201.127623] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.128280] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.128280] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.128355] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.128516] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.128584] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.128702] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.128816] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.128941] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.129075] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1201.129205] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1201.130269] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.131317] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.317026] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Successfully updated port: bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1201.335412] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "refresh_cache-6d7028c7-2233-4f8e-8600-bca7edb1029d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1201.335577] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "refresh_cache-6d7028c7-2233-4f8e-8600-bca7edb1029d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.335747] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 
tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1201.430370] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1201.674660] env[68282]: DEBUG nova.compute.manager [req-7a3451e7-48ad-45ea-a106-9d2e8fcb93a1 req-b2cf8623-b041-46db-a63d-194cd6b345b0 service nova] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Received event network-vif-plugged-bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1201.675016] env[68282]: DEBUG oslo_concurrency.lockutils [req-7a3451e7-48ad-45ea-a106-9d2e8fcb93a1 req-b2cf8623-b041-46db-a63d-194cd6b345b0 service nova] Acquiring lock "6d7028c7-2233-4f8e-8600-bca7edb1029d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.675714] env[68282]: DEBUG oslo_concurrency.lockutils [req-7a3451e7-48ad-45ea-a106-9d2e8fcb93a1 req-b2cf8623-b041-46db-a63d-194cd6b345b0 service nova] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.675972] env[68282]: DEBUG oslo_concurrency.lockutils [req-7a3451e7-48ad-45ea-a106-9d2e8fcb93a1 req-b2cf8623-b041-46db-a63d-194cd6b345b0 service nova] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.676177] env[68282]: DEBUG nova.compute.manager [req-7a3451e7-48ad-45ea-a106-9d2e8fcb93a1 req-b2cf8623-b041-46db-a63d-194cd6b345b0 service nova] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] No waiting events found dispatching network-vif-plugged-bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1201.676351] env[68282]: WARNING nova.compute.manager [req-7a3451e7-48ad-45ea-a106-9d2e8fcb93a1 req-b2cf8623-b041-46db-a63d-194cd6b345b0 service nova] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Received unexpected event network-vif-plugged-bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca for instance with vm_state building and task_state spawning. 
[ 1201.690619] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Updating instance_info_cache with network_info: [{"id": "bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca", "address": "fa:16:3e:62:43:eb", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdcfdc7c-0e", "ovs_interfaceid": "bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.710029] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "refresh_cache-6d7028c7-2233-4f8e-8600-bca7edb1029d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1201.710029] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Instance network_info: |[{"id": "bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca", "address": "fa:16:3e:62:43:eb", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdcfdc7c-0e", "ovs_interfaceid": "bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1201.710750] 
env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:43:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d413776-9a8c-4afd-856f-10dbb062ca95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1201.720712] env[68282]: DEBUG oslo.service.loopingcall [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1201.724233] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1201.724233] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d01856d-cd30-435e-928f-4775969c2d71 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.748492] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1201.748492] env[68282]: value = "task-3470527" [ 1201.748492] env[68282]: _type = "Task" [ 1201.748492] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.760427] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470527, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.858356] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f3e94041-9e33-4906-9cd1-e100cd993789 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "7c17f658-a502-4e35-a4d4-5b8e37da47c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.858581] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f3e94041-9e33-4906-9cd1-e100cd993789 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "7c17f658-a502-4e35-a4d4-5b8e37da47c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.087894] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.259061] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470527, 'name': CreateVM_Task, 'duration_secs': 0.300171} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.259124] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1202.259981] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.259981] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.260339] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1202.261010] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08da9155-7a0f-4cf9-8610-bd6caf16a394 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.265614] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1202.265614] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52deaa60-f0f3-5e91-5b7b-8d68378bb5fd" [ 1202.265614] env[68282]: _type = "Task" [ 1202.265614] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.274893] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52deaa60-f0f3-5e91-5b7b-8d68378bb5fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.777933] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.778230] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1202.778436] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.704470] env[68282]: DEBUG nova.compute.manager [req-af001265-e8c9-4345-a5b6-ccc62761101e req-c0b10cd6-e2d9-4641-a69b-c1affbcce63b service nova] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Received event network-changed-bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1203.705195] env[68282]: DEBUG nova.compute.manager [req-af001265-e8c9-4345-a5b6-ccc62761101e req-c0b10cd6-e2d9-4641-a69b-c1affbcce63b service nova] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Refreshing instance network info cache due to event network-changed-bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1203.705195] env[68282]: DEBUG oslo_concurrency.lockutils [req-af001265-e8c9-4345-a5b6-ccc62761101e req-c0b10cd6-e2d9-4641-a69b-c1affbcce63b service nova] Acquiring lock "refresh_cache-6d7028c7-2233-4f8e-8600-bca7edb1029d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.705195] env[68282]: DEBUG oslo_concurrency.lockutils [req-af001265-e8c9-4345-a5b6-ccc62761101e req-c0b10cd6-e2d9-4641-a69b-c1affbcce63b service nova] Acquired lock "refresh_cache-6d7028c7-2233-4f8e-8600-bca7edb1029d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.705482] env[68282]: DEBUG nova.network.neutron [req-af001265-e8c9-4345-a5b6-ccc62761101e req-c0b10cd6-e2d9-4641-a69b-c1affbcce63b service nova] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Refreshing network info cache for port bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1204.067638] env[68282]: DEBUG nova.network.neutron [req-af001265-e8c9-4345-a5b6-ccc62761101e req-c0b10cd6-e2d9-4641-a69b-c1affbcce63b service nova] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Updated VIF entry in instance network info cache for port bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1204.068073] env[68282]: DEBUG nova.network.neutron [req-af001265-e8c9-4345-a5b6-ccc62761101e req-c0b10cd6-e2d9-4641-a69b-c1affbcce63b service nova] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Updating instance_info_cache with network_info: [{"id": "bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca", "address": "fa:16:3e:62:43:eb", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbdcfdc7c-0e", "ovs_interfaceid": "bdcfdc7c-0eea-4957-8139-7edcb0a1b1ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.079007] env[68282]: DEBUG oslo_concurrency.lockutils [req-af001265-e8c9-4345-a5b6-ccc62761101e req-c0b10cd6-e2d9-4641-a69b-c1affbcce63b service nova] Releasing lock "refresh_cache-6d7028c7-2233-4f8e-8600-bca7edb1029d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.628063] env[68282]: DEBUG oslo_concurrency.lockutils [None req-963c7643-83d4-40c8-86fb-ddfa340b7b56 tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] Acquiring lock "a7576467-3848-400d-925f-0a1a070dbf07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.628403] env[68282]: DEBUG oslo_concurrency.lockutils [None req-963c7643-83d4-40c8-86fb-ddfa340b7b56 tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] Lock "a7576467-3848-400d-925f-0a1a070dbf07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.061420] env[68282]: DEBUG oslo_concurrency.lockutils [None req-34e12bd0-c93e-4a04-a568-d35c94c7388b tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] Acquiring lock "81d2e3d5-2b11-4c9a-93eb-16e6929aada0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.061420] env[68282]: DEBUG oslo_concurrency.lockutils [None req-34e12bd0-c93e-4a04-a568-d35c94c7388b tempest-ServerRescueNegativeTestJSON-1884232739 
tempest-ServerRescueNegativeTestJSON-1884232739-project-member] Lock "81d2e3d5-2b11-4c9a-93eb-16e6929aada0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.507052] env[68282]: WARNING oslo_vmware.rw_handles [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1237.507052] env[68282]: ERROR oslo_vmware.rw_handles [ 1237.507052] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1237.509232] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1237.509509] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Copying Virtual Disk [datastore2] vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/96b46852-5857-4bd3-ab4d-93f49a5d1bf2/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1237.509812] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-29b17e8c-2e64-4428-98f8-742980ef7c7f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.517786] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Waiting for the task: (returnval){ [ 1237.517786] env[68282]: value = "task-3470528" [ 1237.517786] env[68282]: _type = "Task" [ 1237.517786] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.525387] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Task: {'id': task-3470528, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.029937] env[68282]: DEBUG oslo_vmware.exceptions [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1238.030273] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.030826] env[68282]: ERROR nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1238.030826] env[68282]: Faults: ['InvalidArgument'] [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Traceback (most recent call last): [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] yield resources [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] self.driver.spawn(context, instance, image_meta, [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1238.030826] env[68282]: ERROR nova.compute.manager 
[instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] self._fetch_image_if_missing(context, vi) [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] image_cache(vi, tmp_image_ds_loc) [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] vm_util.copy_virtual_disk( [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] session._wait_for_task(vmdk_copy_task) [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] return self.wait_for_task(task_ref) [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] return evt.wait() [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] result = hub.switch() [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] return self.greenlet.switch() [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] self.f(*self.args, **self.kw) [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] raise exceptions.translate_fault(task_info.error) [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 
42977331-21c5-4169-889f-37dfbb10b6ef] Faults: ['InvalidArgument'] [ 1238.030826] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] [ 1238.031922] env[68282]: INFO nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Terminating instance [ 1238.032838] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.033351] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1238.033927] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1238.034145] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1238.034376] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b784778-501e-4434-bc7d-7838783e2e0f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.036863] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f2d9b0-efeb-44ec-91ae-89b6a14828e1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.044071] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1238.044309] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e262f62c-c4b2-46b7-832a-67f017add93a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.046672] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1238.046852] 
env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1238.047868] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f1c1f5b-6378-4365-8ab5-ca411731d754 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.052764] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1238.052764] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5253ebac-5e24-9ea7-3039-c2d6277544ba" [ 1238.052764] env[68282]: _type = "Task" [ 1238.052764] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.060561] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5253ebac-5e24-9ea7-3039-c2d6277544ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.120285] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1238.120464] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1238.120628] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Deleting the datastore file [datastore2] 42977331-21c5-4169-889f-37dfbb10b6ef {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1238.120911] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fcfafdcb-b147-458d-8d7b-5fa59401051d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.127265] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Waiting for the task: (returnval){ [ 1238.127265] env[68282]: value = "task-3470530" [ 1238.127265] env[68282]: _type = "Task" [ 1238.127265] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.135671] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Task: {'id': task-3470530, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.563977] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1238.564264] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating directory with path [datastore2] vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1238.564546] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3211b48-c4fc-477a-b69d-5944f86f5dbe {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.577517] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created directory with path [datastore2] vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1238.577733] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Fetch image to [datastore2] vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1238.577909] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1238.578703] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772d4a26-81b1-410e-9528-e84c41451a84 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.585814] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65680b6-aeb8-48e7-8481-c16ca6b1771a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.595401] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2f14b600-cc96-4458-a535-1b661fc22d75 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.627390] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6848a67-a873-415f-a60c-1f5a6a552ff9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.639079] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-275f062c-44e6-43e2-b68a-3488d7fe712b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.641493] env[68282]: DEBUG oslo_vmware.api [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Task: {'id': task-3470530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079946} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.641847] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1238.642043] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1238.642181] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1238.642360] env[68282]: INFO nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1238.644932] env[68282]: DEBUG nova.compute.claims [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1238.645124] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.645336] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.671823] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1238.738860] env[68282]: DEBUG oslo_vmware.rw_handles [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1238.800862] env[68282]: DEBUG oslo_vmware.rw_handles [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1238.800862] env[68282]: DEBUG oslo_vmware.rw_handles [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1239.136889] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d73d99-ab02-4623-ace4-d7b0f5aa93c2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.147107] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38817894-8522-4375-987e-45f196f3541e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.187187] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe638bf0-d66d-4de5-ba58-1f85b3b04bd3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.194789] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78092f6-d159-40b9-a313-353d5a4f54aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.208009] env[68282]: DEBUG nova.compute.provider_tree [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1239.219848] env[68282]: DEBUG nova.scheduler.client.report [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1239.236041] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.590s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.238031] env[68282]: ERROR nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1239.238031] env[68282]: Faults: ['InvalidArgument'] [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Traceback (most recent call last): [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 
42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] self.driver.spawn(context, instance, image_meta, [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] self._fetch_image_if_missing(context, vi) [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] image_cache(vi, tmp_image_ds_loc) [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] vm_util.copy_virtual_disk( [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] session._wait_for_task(vmdk_copy_task) [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] return self.wait_for_task(task_ref) [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] return evt.wait() [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] result = hub.switch() [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] return self.greenlet.switch() [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] 
self.f(*self.args, **self.kw) [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] raise exceptions.translate_fault(task_info.error) [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Faults: ['InvalidArgument'] [ 1239.238031] env[68282]: ERROR nova.compute.manager [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] [ 1239.238031] env[68282]: DEBUG nova.compute.utils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1239.239444] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Build of instance 42977331-21c5-4169-889f-37dfbb10b6ef was re-scheduled: A specified parameter was not correct: fileType [ 1239.239444] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1239.239829] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1239.240015] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1239.240204] env[68282]: DEBUG nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1239.240376] env[68282]: DEBUG nova.network.neutron [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1239.803015] env[68282]: DEBUG nova.network.neutron [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.813671] env[68282]: INFO nova.compute.manager [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Took 0.57 seconds to deallocate network for instance. [ 1239.908970] env[68282]: INFO nova.scheduler.client.report [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Deleted allocations for instance 42977331-21c5-4169-889f-37dfbb10b6ef [ 1239.928862] env[68282]: DEBUG oslo_concurrency.lockutils [None req-50468417-b52a-4df5-9238-5f7b5997b420 tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "42977331-21c5-4169-889f-37dfbb10b6ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 513.313s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.929957] env[68282]: DEBUG oslo_concurrency.lockutils [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "42977331-21c5-4169-889f-37dfbb10b6ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 311.705s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.930195] env[68282]: DEBUG oslo_concurrency.lockutils [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Acquiring lock "42977331-21c5-4169-889f-37dfbb10b6ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.930405] env[68282]: DEBUG oslo_concurrency.lockutils [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d 
tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "42977331-21c5-4169-889f-37dfbb10b6ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.930577] env[68282]: DEBUG oslo_concurrency.lockutils [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "42977331-21c5-4169-889f-37dfbb10b6ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.932580] env[68282]: INFO nova.compute.manager [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Terminating instance [ 1240.556605] env[68282]: DEBUG nova.compute.manager [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1240.556848] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1240.557211] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1240.559699] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9ed6d89-744d-46b5-ae5d-b7cf65d4b90c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.568539] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329895ca-263d-4e54-81c0-3861b7550922 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.596120] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 42977331-21c5-4169-889f-37dfbb10b6ef could not be found. 
[ 1240.596333] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1240.596509] env[68282]: INFO nova.compute.manager [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1240.596749] env[68282]: DEBUG oslo.service.loopingcall [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1240.596964] env[68282]: DEBUG nova.compute.manager [-] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1240.597073] env[68282]: DEBUG nova.network.neutron [-] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1240.613468] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.613712] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.615112] env[68282]: INFO nova.compute.claims [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1240.627349] env[68282]: DEBUG nova.network.neutron [-] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.635933] env[68282]: INFO nova.compute.manager [-] [instance: 42977331-21c5-4169-889f-37dfbb10b6ef] Took 0.04 seconds to deallocate network for instance. 
[ 1240.720243] env[68282]: DEBUG oslo_concurrency.lockutils [None req-36fff757-5c4a-4bef-b06c-c33282a5c50d tempest-FloatingIPsAssociationNegativeTestJSON-312201371 tempest-FloatingIPsAssociationNegativeTestJSON-312201371-project-member] Lock "42977331-21c5-4169-889f-37dfbb10b6ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.790s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.973611] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b161c7a-335e-4b5f-a2c8-edccc699bc3d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.981134] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cff01da-acfd-4369-9e7c-92fe65696f54 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.011445] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7fda2ea-4c0b-46dc-9b21-ef2f87db5b0e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.018062] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c2e774-cd7b-4fd2-a1c5-615f2c5903d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.030704] env[68282]: DEBUG nova.compute.provider_tree [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.039601] env[68282]: DEBUG nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1241.053318] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.439s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.053872] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1241.086218] env[68282]: DEBUG nova.compute.utils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1241.087722] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1241.087891] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1241.099012] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1241.175465] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1241.178775] env[68282]: DEBUG nova.policy [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '143a98c51c9c40ebbff887a693684682', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a237c9e184054632bfe56e082bcc95cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1241.201879] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1241.202171] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1241.202335] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1241.202517] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1241.202667] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1241.202816] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1241.203141] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1241.203356] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1241.203539] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1241.203727] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1241.203914] env[68282]: DEBUG nova.virt.hardware [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1241.204790] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf75cb38-a26d-4241-ba25-50da8b6bb29f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.212541] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ce7b64-336b-4a68-8ba1-772fd4710b91 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.737959] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Successfully created port: 68f2c293-cf17-4249-a731-a6a30757154a {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1242.475245] env[68282]: DEBUG nova.compute.manager [req-54f74e7a-de39-4695-beb1-6e400d7fd872 req-73e3b7c8-e0b7-47d1-a241-0e40a567ca3b service nova] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Received event network-vif-plugged-68f2c293-cf17-4249-a731-a6a30757154a {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1242.475495] env[68282]: DEBUG oslo_concurrency.lockutils [req-54f74e7a-de39-4695-beb1-6e400d7fd872 req-73e3b7c8-e0b7-47d1-a241-0e40a567ca3b service nova] Acquiring lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1242.475673] env[68282]: DEBUG oslo_concurrency.lockutils [req-54f74e7a-de39-4695-beb1-6e400d7fd872 req-73e3b7c8-e0b7-47d1-a241-0e40a567ca3b service nova] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1242.475846] env[68282]: DEBUG oslo_concurrency.lockutils [req-54f74e7a-de39-4695-beb1-6e400d7fd872 req-73e3b7c8-e0b7-47d1-a241-0e40a567ca3b service nova] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1242.476107] env[68282]: DEBUG nova.compute.manager [req-54f74e7a-de39-4695-beb1-6e400d7fd872 req-73e3b7c8-e0b7-47d1-a241-0e40a567ca3b service nova] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] No waiting events found dispatching network-vif-plugged-68f2c293-cf17-4249-a731-a6a30757154a {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1242.476340] env[68282]: WARNING nova.compute.manager [req-54f74e7a-de39-4695-beb1-6e400d7fd872 req-73e3b7c8-e0b7-47d1-a241-0e40a567ca3b service nova] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Received unexpected event network-vif-plugged-68f2c293-cf17-4249-a731-a6a30757154a for instance with vm_state building and task_state spawning. [ 1242.579745] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Successfully updated port: 68f2c293-cf17-4249-a731-a6a30757154a {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1242.593618] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "refresh_cache-d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.593802] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "refresh_cache-d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.593964] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1242.669586] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1242.879547] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Updating instance_info_cache with network_info: [{"id": "68f2c293-cf17-4249-a731-a6a30757154a", "address": "fa:16:3e:1a:6d:f7", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68f2c293-cf", "ovs_interfaceid": "68f2c293-cf17-4249-a731-a6a30757154a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.891468] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "refresh_cache-d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1242.891745] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Instance network_info: |[{"id": "68f2c293-cf17-4249-a731-a6a30757154a", "address": "fa:16:3e:1a:6d:f7", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68f2c293-cf", "ovs_interfaceid": "68f2c293-cf17-4249-a731-a6a30757154a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1242.892146] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:6d:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d413776-9a8c-4afd-856f-10dbb062ca95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68f2c293-cf17-4249-a731-a6a30757154a', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1242.899637] env[68282]: DEBUG oslo.service.loopingcall [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1242.900089] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1242.900338] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d12c2e2-6849-4d2c-89a1-92bf9a305cef {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.920670] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1242.920670] env[68282]: value = "task-3470531" [ 1242.920670] env[68282]: _type = "Task" [ 1242.920670] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.928197] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470531, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.431159] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470531, 'name': CreateVM_Task, 'duration_secs': 0.281419} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.431332] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1243.432061] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.432244] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.432556] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1243.432802] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe5d07eb-edcf-400c-84d5-cb358dc851b5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.437555] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1243.437555] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]521f7d83-2667-bc44-f696-961c377d95af" [ 1243.437555] env[68282]: _type = "Task" [ 1243.437555] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.445740] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]521f7d83-2667-bc44-f696-961c377d95af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.948883] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.949266] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1243.949359] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.547879] env[68282]: DEBUG nova.compute.manager [req-eb1b7ac5-e79e-48dc-a7b2-64eb791b8698 req-ee1cbe3c-8bde-449d-aaae-fdfa1c1a4026 service nova] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Received event network-changed-68f2c293-cf17-4249-a731-a6a30757154a {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1244.548100] env[68282]: DEBUG nova.compute.manager [req-eb1b7ac5-e79e-48dc-a7b2-64eb791b8698 req-ee1cbe3c-8bde-449d-aaae-fdfa1c1a4026 service nova] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Refreshing instance network info cache due to event network-changed-68f2c293-cf17-4249-a731-a6a30757154a. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1244.548318] env[68282]: DEBUG oslo_concurrency.lockutils [req-eb1b7ac5-e79e-48dc-a7b2-64eb791b8698 req-ee1cbe3c-8bde-449d-aaae-fdfa1c1a4026 service nova] Acquiring lock "refresh_cache-d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.548459] env[68282]: DEBUG oslo_concurrency.lockutils [req-eb1b7ac5-e79e-48dc-a7b2-64eb791b8698 req-ee1cbe3c-8bde-449d-aaae-fdfa1c1a4026 service nova] Acquired lock "refresh_cache-d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.548618] env[68282]: DEBUG nova.network.neutron [req-eb1b7ac5-e79e-48dc-a7b2-64eb791b8698 req-ee1cbe3c-8bde-449d-aaae-fdfa1c1a4026 service nova] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Refreshing network info cache for port 68f2c293-cf17-4249-a731-a6a30757154a {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1244.823478] env[68282]: DEBUG nova.network.neutron [req-eb1b7ac5-e79e-48dc-a7b2-64eb791b8698 req-ee1cbe3c-8bde-449d-aaae-fdfa1c1a4026 service nova] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Updated VIF entry in instance network info cache for port 68f2c293-cf17-4249-a731-a6a30757154a. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1244.823868] env[68282]: DEBUG nova.network.neutron [req-eb1b7ac5-e79e-48dc-a7b2-64eb791b8698 req-ee1cbe3c-8bde-449d-aaae-fdfa1c1a4026 service nova] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Updating instance_info_cache with network_info: [{"id": "68f2c293-cf17-4249-a731-a6a30757154a", "address": "fa:16:3e:1a:6d:f7", "network": {"id": "4f393110-f7f2-4f0d-a11a-1f2e94acf61e", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-476476380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a237c9e184054632bfe56e082bcc95cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68f2c293-cf", "ovs_interfaceid": "68f2c293-cf17-4249-a731-a6a30757154a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.832896] env[68282]: DEBUG oslo_concurrency.lockutils [req-eb1b7ac5-e79e-48dc-a7b2-64eb791b8698 req-ee1cbe3c-8bde-449d-aaae-fdfa1c1a4026 service nova] Releasing lock "refresh_cache-d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1250.090019] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1253.012170] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquiring lock "16824286-3e71-4f49-8a6e-93f10ec668d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.012492] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "16824286-3e71-4f49-8a6e-93f10ec668d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.095272] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.112351] env[68282]: DEBUG 
oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.112585] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.112755] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.112911] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1255.114221] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b086c5-f1d7-4993-a034-d4226eaec033 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.124497] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c06df94-43f4-46c3-9e77-c68702851854 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.138113] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f4772f-75ae-449a-b12e-50540a030437 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.144146] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a329321-13e2-49e1-8412-22a98937fb2a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.172355] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180865MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1255.172495] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.172679] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.286388] env[68282]: DEBUG nova.compute.resource_tracker [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.286562] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.286692] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.286825] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.286937] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.287073] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.287195] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.287309] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.287423] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.287546] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.299012] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.310076] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e06eb2ba-c8a8-41b8-89b8-2afb94abe501 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.321339] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 52507e06-547b-4fbe-8689-cf497332c7de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.332137] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8b6e3667-a6d8-4840-a849-7f4e26f93767 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.342035] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.352094] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.362075] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e86e97e0-c191-4f39-9e71-1e99dfbbe65f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.371687] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 340f09fe-88a8-4b55-bf56-771d9fe1a14a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.383156] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 70943b53-2303-4cf8-8e02-95cbf011454c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.392467] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance da3a24eb-1e73-4137-b54f-da8077968d78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.401895] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.412823] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance f1e39c16-6bf4-4b22-9bac-b82d176df7f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.422059] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.430954] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.440675] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7c17f658-a502-4e35-a4d4-5b8e37da47c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.450862] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7576467-3848-400d-925f-0a1a070dbf07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.460301] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 81d2e3d5-2b11-4c9a-93eb-16e6929aada0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.469357] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.469585] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1255.469727] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1255.770389] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997ad4f0-f7f4-4138-aba8-28853aec4aff {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.777679] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6715fd1-4ce9-4a06-853a-ddea18d59f19 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.806661] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b35fb64-fd14-4826-97c1-9319402f3e12 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.813354] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76256830-bda6-4864-8d09-05b8952eaf37 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.826125] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.834220] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1255.848492] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1255.848661] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.676s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.848870] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.849014] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11263}} [ 1255.856667] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] There are 0 instances to clean {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11272}} [ 1259.849111] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1259.849436] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1259.849502] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1260.088021] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.083677] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.105255] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.105432] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1261.105554] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1261.125474] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.125632] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.125768] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.125896] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.126030] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.126162] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.126276] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.126395] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.126512] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.126627] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1261.126746] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1261.127202] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.087847] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.087847] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.087390] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.087676] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.087820] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances with incomplete migration {{(pid=68282) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11301}} [ 1283.554041] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_power_states {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.576896] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Getting list of instances from cluster (obj){ [ 1283.576896] env[68282]: value = "domain-c8" [ 1283.576896] env[68282]: _type = "ClusterComputeResource" [ 1283.576896] env[68282]: } {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1283.578532] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1fff61-1278-411a-946c-3d9ccedb4164 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.595573] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Got total of 10 instances {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1283.595740] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.595929] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid b081435b-64e1-4baa-a634-a2f22a3d9a29 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.596113] 
env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.596278] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 50234924-2933-4a79-9a33-3cb968b6e08a {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.596431] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid a7b5f30a-7ddf-4e8b-b57c-715e41819c29 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.596579] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 66243637-f1f4-4c60-b12a-bbe30c423630 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.596729] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid eeb7149b-8d07-4968-9089-d6278c4565e5 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.596884] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid a1676f73-3871-4f59-8440-3ccb27a9a7b9 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.597042] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 6d7028c7-2233-4f8e-8600-bca7edb1029d {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.597199] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1283.597513] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.597742] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.597943] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.598157] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "50234924-2933-4a79-9a33-3cb968b6e08a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1283.598352] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.598549] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "66243637-f1f4-4c60-b12a-bbe30c423630" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.598745] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "eeb7149b-8d07-4968-9089-d6278c4565e5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.598941] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.599149] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.599344] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.331356] env[68282]: WARNING oslo_vmware.rw_handles [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote 
end closed connection without" [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1286.331356] env[68282]: ERROR oslo_vmware.rw_handles [ 1286.332008] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1286.333791] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1286.334083] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Copying Virtual Disk [datastore2] vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/5df27400-ba4c-4726-abc8-7e9a3dac73c6/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1286.334369] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1affcf6c-7d77-4505-9b15-3f6e6c349c3b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.343529] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1286.343529] env[68282]: value = "task-3470532" [ 1286.343529] env[68282]: _type = "Task" [ 1286.343529] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.351051] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470532, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.854299] env[68282]: DEBUG oslo_vmware.exceptions [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1286.854629] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.855273] env[68282]: ERROR nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1286.855273] env[68282]: Faults: ['InvalidArgument'] [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Traceback (most recent call last): [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] yield resources [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] self.driver.spawn(context, instance, image_meta, [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] self._fetch_image_if_missing(context, vi) [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] image_cache(vi, tmp_image_ds_loc) [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] vm_util.copy_virtual_disk( [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] session._wait_for_task(vmdk_copy_task) [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] return self.wait_for_task(task_ref) [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] return evt.wait() [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] result = hub.switch() [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] return self.greenlet.switch() [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] self.f(*self.args, **self.kw) [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] raise exceptions.translate_fault(task_info.error) [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Faults: ['InvalidArgument'] [ 1286.855273] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] [ 1286.856441] env[68282]: INFO nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Terminating instance [ 1286.857258] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.857471] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1286.857704] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e444732-fb30-416e-a7da-f6e85fe3317e {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.859840] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1286.860044] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1286.860747] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7565b4-9d61-4687-be30-8af07f611cfd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.868335] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1286.868512] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a87854ca-66af-4807-9703-cd1885c2f1b9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.870556] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1286.870726] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1286.871719] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88f0ec66-6244-4ba0-b7ef-d230905ebdc0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.876292] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Waiting for the task: (returnval){ [ 1286.876292] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52612ada-0c5d-c8df-3b2e-018f64028729" [ 1286.876292] env[68282]: _type = "Task" [ 1286.876292] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.883253] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52612ada-0c5d-c8df-3b2e-018f64028729, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.938489] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1286.938718] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1286.938899] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleting the datastore file [datastore2] 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1286.939195] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3421875b-330d-46fb-bfe8-5b43403b521b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.945476] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1286.945476] env[68282]: value = "task-3470534" [ 1286.945476] env[68282]: _type = "Task" [ 1286.945476] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.953488] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470534, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.386104] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1287.386384] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Creating directory with path [datastore2] vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1287.386614] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c09810a2-1934-4bbb-8929-3faa046c47e1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.398277] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Created directory with path [datastore2] vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1287.398473] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Fetch image to [datastore2] vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1287.398648] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1287.399415] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ee4b71-2090-489c-aaae-3a99105a16f1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.405815] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726e8b62-cda6-4ea9-8626-6e6072ec50c9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.414639] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66da1322-d408-44bd-8762-d149e0f743f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.444280] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c2c9871d-bba3-49a4-a109-1f7f86eab4d0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.454978] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b42e68ac-bcba-4174-811e-a5af283c70aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.456696] env[68282]: DEBUG oslo_vmware.api [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078283} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.456934] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1287.457130] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1287.457300] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1287.457475] env[68282]: INFO nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1287.460095] env[68282]: DEBUG nova.compute.claims [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1287.460095] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.460095] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.478564] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1287.530144] env[68282]: DEBUG oslo_vmware.rw_handles [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1287.590739] env[68282]: DEBUG oslo_vmware.rw_handles [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1287.591031] env[68282]: DEBUG oslo_vmware.rw_handles [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1287.863257] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059670d2-3c6a-4f81-9772-4f47c951b3d3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.870560] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70947d0c-9f50-468c-b547-8d6e18f05593 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.901419] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192d83fa-a532-46da-b6ac-305a07ab3158 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.908495] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead4f77b-529c-471e-a7ab-ba3c008926f8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.921508] env[68282]: DEBUG nova.compute.provider_tree [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1287.930546] env[68282]: DEBUG nova.scheduler.client.report [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1287.948898] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.489s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.949482] env[68282]: ERROR nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1287.949482] env[68282]: Faults: ['InvalidArgument'] [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Traceback (most recent call last): [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] 
self.driver.spawn(context, instance, image_meta, [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] self._fetch_image_if_missing(context, vi) [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] image_cache(vi, tmp_image_ds_loc) [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] vm_util.copy_virtual_disk( [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] session._wait_for_task(vmdk_copy_task) [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] return self.wait_for_task(task_ref) [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] return evt.wait() [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] result = hub.switch() [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] return self.greenlet.switch() [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] self.f(*self.args, **self.kw) [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] raise exceptions.translate_fault(task_info.error) [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Faults: ['InvalidArgument'] [ 1287.949482] env[68282]: ERROR nova.compute.manager [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] [ 1287.950393] env[68282]: DEBUG nova.compute.utils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1287.951942] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Build of instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 was re-scheduled: A specified parameter was not correct: fileType [ 1287.951942] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1287.952342] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1287.952526] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1287.952705] env[68282]: DEBUG nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1287.952872] env[68282]: DEBUG nova.network.neutron [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1288.366763] env[68282]: DEBUG nova.network.neutron [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.380557] env[68282]: INFO nova.compute.manager [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Took 0.43 seconds to deallocate network for instance. 
[ 1288.470461] env[68282]: INFO nova.scheduler.client.report [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleted allocations for instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 [ 1288.491829] env[68282]: DEBUG oslo_concurrency.lockutils [None req-966f44e1-cd9e-4c1c-9aba-c8d3a80e8a0a tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 558.239s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.492944] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 358.576s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.492944] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.493223] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.493395] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.496331] env[68282]: INFO nova.compute.manager [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Terminating instance [ 1288.498332] env[68282]: DEBUG nova.compute.manager [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1288.498532] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1288.500051] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29820b8e-6ccb-490f-a3fb-5ffdcdc642ab {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.509200] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f023da-7c92-4043-b8b5-fa25fe72ded1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.521719] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1288.544918] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b30ba17-99e9-44bc-bd78-73fe5d6cab05 could not be found. [ 1288.545204] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1288.545397] env[68282]: INFO nova.compute.manager [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1288.545647] env[68282]: DEBUG oslo.service.loopingcall [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1288.545874] env[68282]: DEBUG nova.compute.manager [-] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1288.546241] env[68282]: DEBUG nova.network.neutron [-] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1288.568822] env[68282]: DEBUG nova.network.neutron [-] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.570800] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.571037] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.572407] env[68282]: INFO nova.compute.claims [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1288.576671] env[68282]: INFO nova.compute.manager [-] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] Took 0.03 seconds to deallocate network for instance. [ 1288.663437] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c0b92b9d-d6f6-4260-a349-c3fcf9f157d8 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.171s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.664729] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.067s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.664729] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2b30ba17-99e9-44bc-bd78-73fe5d6cab05] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1288.664729] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "2b30ba17-99e9-44bc-bd78-73fe5d6cab05" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.897883] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1c3da1-0ca9-430e-8fca-dc413d394d6f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.905365] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6843cf15-4857-459b-b77c-7b6a37e95f4b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.938038] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea5ba39-faa7-4007-bdbf-ece6caf94ad8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.945135] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a12016-654d-48db-982f-ca1c8e99ae95 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.958207] env[68282]: DEBUG nova.compute.provider_tree [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1288.967258] env[68282]: DEBUG nova.scheduler.client.report [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1288.981396] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.410s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1288.981874] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1289.016487] env[68282]: DEBUG nova.compute.utils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1289.018078] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1289.018286] env[68282]: DEBUG nova.network.neutron [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1289.026450] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1289.095470] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1289.122700] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1289.123349] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1289.123349] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1289.123349] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee 
tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1289.123521] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1289.123636] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1289.123792] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1289.123956] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1289.124173] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1289.124519] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1289.124519] env[68282]: DEBUG nova.virt.hardware [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1289.125423] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d779b5-b3dc-4bf0-a5df-2800b0206029 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.129319] env[68282]: DEBUG nova.policy [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeff02086d114be7816a6d2558c9c8fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea2948d9c0a046a09077c014de41faeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1289.141021] env[68282]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18909033-80bd-4057-9e0e-31988307ea00 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.557658] env[68282]: DEBUG nova.network.neutron [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Successfully created port: 7975cb99-5412-4259-a645-61dc1e39aa22 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1290.341198] env[68282]: DEBUG nova.network.neutron [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Successfully updated port: 7975cb99-5412-4259-a645-61dc1e39aa22 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1290.353711] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "refresh_cache-6680219f-25bf-453c-ba97-4aeb3295f62b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.353711] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "refresh_cache-6680219f-25bf-453c-ba97-4aeb3295f62b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.353711] env[68282]: DEBUG nova.network.neutron [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1290.622149] env[68282]: DEBUG nova.network.neutron [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1290.698602] env[68282]: DEBUG nova.compute.manager [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Received event network-vif-plugged-7975cb99-5412-4259-a645-61dc1e39aa22 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1290.698848] env[68282]: DEBUG oslo_concurrency.lockutils [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] Acquiring lock "6680219f-25bf-453c-ba97-4aeb3295f62b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.699112] env[68282]: DEBUG oslo_concurrency.lockutils [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] Lock "6680219f-25bf-453c-ba97-4aeb3295f62b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.699299] env[68282]: DEBUG oslo_concurrency.lockutils [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] Lock "6680219f-25bf-453c-ba97-4aeb3295f62b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.699467] env[68282]: DEBUG nova.compute.manager [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] No waiting events found dispatching network-vif-plugged-7975cb99-5412-4259-a645-61dc1e39aa22 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1290.699634] env[68282]: WARNING nova.compute.manager [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Received unexpected event network-vif-plugged-7975cb99-5412-4259-a645-61dc1e39aa22 for instance with vm_state building and task_state spawning. [ 1290.699794] env[68282]: DEBUG nova.compute.manager [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Received event network-changed-7975cb99-5412-4259-a645-61dc1e39aa22 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1290.699950] env[68282]: DEBUG nova.compute.manager [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Refreshing instance network info cache due to event network-changed-7975cb99-5412-4259-a645-61dc1e39aa22. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1290.700310] env[68282]: DEBUG oslo_concurrency.lockutils [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] Acquiring lock "refresh_cache-6680219f-25bf-453c-ba97-4aeb3295f62b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1290.896184] env[68282]: DEBUG nova.network.neutron [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Updating instance_info_cache with network_info: [{"id": "7975cb99-5412-4259-a645-61dc1e39aa22", "address": "fa:16:3e:a2:70:de", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7975cb99-54", "ovs_interfaceid": "7975cb99-5412-4259-a645-61dc1e39aa22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.913020] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "refresh_cache-6680219f-25bf-453c-ba97-4aeb3295f62b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.913020] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Instance network_info: |[{"id": "7975cb99-5412-4259-a645-61dc1e39aa22", "address": "fa:16:3e:a2:70:de", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7975cb99-54", "ovs_interfaceid": 
"7975cb99-5412-4259-a645-61dc1e39aa22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1290.913020] env[68282]: DEBUG oslo_concurrency.lockutils [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] Acquired lock "refresh_cache-6680219f-25bf-453c-ba97-4aeb3295f62b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.913230] env[68282]: DEBUG nova.network.neutron [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Refreshing network info cache for port 7975cb99-5412-4259-a645-61dc1e39aa22 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1290.914801] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:70:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7975cb99-5412-4259-a645-61dc1e39aa22', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1290.922557] env[68282]: DEBUG oslo.service.loopingcall [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1290.925616] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1290.926103] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-efcfae9c-30bf-4f9f-abcf-56cbd1ad61b2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.948399] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1290.948399] env[68282]: value = "task-3470535" [ 1290.948399] env[68282]: _type = "Task" [ 1290.948399] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.956305] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470535, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.257874] env[68282]: DEBUG nova.network.neutron [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Updated VIF entry in instance network info cache for port 7975cb99-5412-4259-a645-61dc1e39aa22. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1291.258278] env[68282]: DEBUG nova.network.neutron [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Updating instance_info_cache with network_info: [{"id": "7975cb99-5412-4259-a645-61dc1e39aa22", "address": "fa:16:3e:a2:70:de", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7975cb99-54", "ovs_interfaceid": "7975cb99-5412-4259-a645-61dc1e39aa22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.268596] env[68282]: DEBUG oslo_concurrency.lockutils [req-26722e69-3a31-4f7c-9168-2e2af86e0d41 req-1584dc81-9b5c-493f-af17-f7e8d4b3e2d0 service nova] Releasing lock "refresh_cache-6680219f-25bf-453c-ba97-4aeb3295f62b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.458565] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470535, 'name': CreateVM_Task, 'duration_secs': 0.296885} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.458832] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1291.459551] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.459724] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.460653] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1291.460653] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73be8f6d-5bf6-4cab-b606-78fbe550b724 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.464681] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1291.464681] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52c0aa65-c928-7a95-4f1b-c573f66789bc" [ 1291.464681] env[68282]: _type = "Task" [ 1291.464681] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.472148] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52c0aa65-c928-7a95-4f1b-c573f66789bc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.975579] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1291.975859] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1291.976080] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1315.087863] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1315.100719] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.100854] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.101015] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1315.101188] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1315.102286] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cc3ca7-1eec-462d-aac6-73f23fe83310 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.111183] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b60ef60-a10e-49a4-91e4-27d3ce9d651b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.126043] env[68282]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb3d3a0-d5f0-4b2c-9918-8dcfb5caadf6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.131528] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd99852-e411-4917-a78b-69f47bf0b32e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.161853] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180921MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1315.162049] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.162162] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.313553] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.313718] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.313864] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.313972] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.314108] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.314255] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.314385] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.314504] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.314622] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.314748] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1315.326942] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e06eb2ba-c8a8-41b8-89b8-2afb94abe501 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.337744] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 52507e06-547b-4fbe-8689-cf497332c7de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.346707] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8b6e3667-a6d8-4840-a849-7f4e26f93767 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.356691] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.366719] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.376829] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e86e97e0-c191-4f39-9e71-1e99dfbbe65f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.386758] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 340f09fe-88a8-4b55-bf56-771d9fe1a14a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.396875] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 70943b53-2303-4cf8-8e02-95cbf011454c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.406575] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance da3a24eb-1e73-4137-b54f-da8077968d78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.417061] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.427025] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance f1e39c16-6bf4-4b22-9bac-b82d176df7f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.437352] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.447363] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.458122] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7c17f658-a502-4e35-a4d4-5b8e37da47c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.468600] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7576467-3848-400d-925f-0a1a070dbf07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.479310] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 81d2e3d5-2b11-4c9a-93eb-16e6929aada0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.490116] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1315.490236] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1315.490388] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1315.508309] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing inventories for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1315.523420] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Updating ProviderTree inventory for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1315.523616] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Updating inventory in ProviderTree for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1315.534591] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing aggregate associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, aggregates: None {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1315.552191] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing trait associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1315.896105] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb688b8-d6f3-4554-b33d-c84fa9a6358b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.903654] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ba110b0c-c7b6-46f5-83db-5ecc5e9fa22c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.933349] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21cb6c15-4081-44f9-8390-5ce1d219824b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.940459] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d428d6-c26e-4800-959c-c754437d9c38 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.952994] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1315.961367] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1315.985054] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1315.985054] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.823s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.498038] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.695787] env[68282]: DEBUG oslo_concurrency.lockutils [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.801504] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.984557] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1321.984885] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1321.984885] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1322.010197] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.010374] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.010512] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.010642] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.010769] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.010894] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.011133] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.011330] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.011462] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.011584] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1322.011707] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1322.012239] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.012437] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.012572] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1322.087370] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.087618] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.087788] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.087108] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.495875] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "6680219f-25bf-453c-ba97-4aeb3295f62b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.087587] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None 
None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.197954] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "9874370f-917a-412b-91ce-a92e73d6ac0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.198321] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.913622] env[68282]: WARNING oslo_vmware.rw_handles [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1335.913622] env[68282]: ERROR oslo_vmware.rw_handles [ 1335.915122] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1335.915944] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 
1335.916211] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Copying Virtual Disk [datastore2] vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/86db70a9-50da-4fe4-bccc-53558b3ece3b/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1335.916568] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1025bcf-64c2-47d0-af08-815426d7ad96 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.924057] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Waiting for the task: (returnval){ [ 1335.924057] env[68282]: value = "task-3470536" [ 1335.924057] env[68282]: _type = "Task" [ 1335.924057] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.932063] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Task: {'id': task-3470536, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.439361] env[68282]: DEBUG oslo_vmware.exceptions [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1336.440104] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.440826] env[68282]: ERROR nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1336.440826] env[68282]: Faults: ['InvalidArgument'] [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Traceback (most recent call last): [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] yield resources [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] self.driver.spawn(context, instance, image_meta, [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] self._fetch_image_if_missing(context, vi) [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] image_cache(vi, tmp_image_ds_loc) [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] vm_util.copy_virtual_disk( [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] session._wait_for_task(vmdk_copy_task) [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] return self.wait_for_task(task_ref) [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] return evt.wait() [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] result = hub.switch() [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] return self.greenlet.switch() [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] self.f(*self.args, **self.kw) [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] raise exceptions.translate_fault(task_info.error) [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Faults: ['InvalidArgument'] [ 1336.440826] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] [ 1336.444017] env[68282]: INFO nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Terminating instance [ 1336.444571] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.445063] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1336.445928] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-07c82209-6275-4154-895a-7f93770e8ad4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.448989] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1336.449408] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1336.450644] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934918a6-1712-4324-a1db-88dd79266d13 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.459343] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1336.459929] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8b2b35f-5254-4e21-9a73-bb4e68a0c9a2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.462621] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1336.464954] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1336.464954] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72424cec-144b-4ca3-85c2-7417655da003 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.469676] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Waiting for the task: (returnval){ [ 1336.469676] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52d20293-7d1d-557f-6120-b457541028bc" [ 1336.469676] env[68282]: _type = "Task" [ 1336.469676] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.479190] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52d20293-7d1d-557f-6120-b457541028bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.522153] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1336.522567] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1336.522789] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Deleting the datastore file [datastore2] b081435b-64e1-4baa-a634-a2f22a3d9a29 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1336.523088] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d363ea50-e487-483b-b1be-484506ffcf78 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.530304] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Waiting for the task: (returnval){ [ 1336.530304] env[68282]: value = "task-3470538" [ 1336.530304] env[68282]: _type = "Task" [ 1336.530304] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.539511] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Task: {'id': task-3470538, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.985635] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1336.985921] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Creating directory with path [datastore2] vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1336.986181] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ae174d1-10aa-45cd-97d5-161724822d33 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.999251] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Created directory with path [datastore2] vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1336.999456] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Fetch image to [datastore2] vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1336.999625] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1337.000410] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8472b97-9555-4760-8864-ab7a1ea03cc8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.012967] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a2990a-17fa-46fa-bcdb-8e1fc4008306 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.023274] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4aa40d-7bbc-40b9-af72-c5492e7a616e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.059500] env[68282]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8232db66-e614-4125-b44e-6dc615dc2bd1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.071555] env[68282]: DEBUG oslo_vmware.api [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Task: {'id': task-3470538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080446} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.073153] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1337.074551] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1337.074551] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1337.074551] env[68282]: INFO nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Took 0.62 seconds to destroy the instance on the hypervisor. 
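The traceback above reduces to one pattern: Nova submits CopyVirtualDisk_Task through its oslo.vmware session, polls it with wait_for_task (the "progress is 0%" lines), and a task error comes back as a translated VimFaultException, here with Faults: ['InvalidArgument'] on the fileType parameter. The sketch below shows that pattern with oslo.vmware's public API; the vCenter endpoint, credentials and datastore paths are placeholders, and this is an illustrative rewrite, not Nova's vm_util code.

# Hedged sketch of the copy-disk-and-wait pattern seen in the traceback above.
from oslo_vmware import api
from oslo_vmware import exceptions as vexc

session = api.VMwareAPISession(
    'vc.example.org', 'administrator', 'secret',    # placeholder vCenter/credentials
    api_retry_count=10, task_poll_interval=0.5)      # poll interval -> the "_poll_task" lines

src = '[datastore2] vmware_temp/example/tmp-sparse.vmdk'    # placeholder datastore paths
dst = '[datastore2] vmware_temp/example/image.vmdk'
disk_mgr = session.vim.service_content.virtualDiskManager

task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                          sourceName=src, destName=dst)
try:
    # wait_for_task polls the task and raises a fault translated by
    # oslo_vmware.exceptions if the task ends in error.
    session.wait_for_task(task)
except vexc.VimFaultException as exc:
    # exc.fault_list carries the raw vSphere fault names, e.g. ['InvalidArgument'];
    # exc.msg carries the readable text ("A specified parameter was not correct: fileType").
    # Nova's reaction, visible in the surrounding records, is to destroy the half-built VM,
    # abort the resource claim and re-schedule the build.
    raise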
[ 1337.075876] env[68282]: DEBUG nova.compute.claims [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1337.076107] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.076421] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.080350] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5fa5f8d3-4873-44ae-8f10-2f892984fab2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.103317] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1337.410320] env[68282]: DEBUG oslo_vmware.rw_handles [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1337.490035] env[68282]: DEBUG oslo_vmware.rw_handles [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1337.490035] env[68282]: DEBUG oslo_vmware.rw_handles [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1337.620250] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.620406] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.691252] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151fada9-6058-44fd-8235-a7974dcbfa36 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.701466] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46c3a10-2909-430d-a703-e2535ee24d12 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.732988] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a48f3c-3770-4282-aedb-96725c188aa3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.741027] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f753ec13-b79a-46d1-b31b-ddb59a372aff {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.754501] env[68282]: DEBUG nova.compute.provider_tree [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1337.765277] env[68282]: DEBUG nova.scheduler.client.report [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1337.787584] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.711s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.788379] env[68282]: ERROR nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1337.788379] env[68282]: Faults: ['InvalidArgument'] [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Traceback (most recent call last): [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] self.driver.spawn(context, instance, image_meta, [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] self._fetch_image_if_missing(context, vi) [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] image_cache(vi, tmp_image_ds_loc) [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] vm_util.copy_virtual_disk( [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] session._wait_for_task(vmdk_copy_task) [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] return self.wait_for_task(task_ref) [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] return evt.wait() [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] result = hub.switch() [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] return self.greenlet.switch() [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] self.f(*self.args, **self.kw) [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] raise exceptions.translate_fault(task_info.error) [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Faults: ['InvalidArgument'] [ 1337.788379] env[68282]: ERROR nova.compute.manager [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] [ 1337.790046] env[68282]: DEBUG nova.compute.utils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1337.791961] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Build of instance b081435b-64e1-4baa-a634-a2f22a3d9a29 was re-scheduled: A specified parameter was not correct: fileType [ 1337.791961] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1337.792415] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1337.792605] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1337.792990] env[68282]: DEBUG nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1337.793220] env[68282]: DEBUG nova.network.neutron [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1338.737748] env[68282]: DEBUG nova.network.neutron [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1338.744617] env[68282]: DEBUG oslo_concurrency.lockutils [None req-eaa94254-3b7c-4e01-8db3-20f5ee68ca38 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "56b8d673-b69b-4f07-9c35-12c9390ed505" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.744862] env[68282]: DEBUG oslo_concurrency.lockutils [None req-eaa94254-3b7c-4e01-8db3-20f5ee68ca38 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "56b8d673-b69b-4f07-9c35-12c9390ed505" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.753765] env[68282]: INFO nova.compute.manager [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Took 0.96 seconds to deallocate network for instance. 
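The "Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e" lines above repeat the same placement inventory for this compute node. As a quick worked example (plain arithmetic over the dict printed in the log, not Nova or Placement code), the schedulable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation:

# Worked example over the inventory dict reported above; the helper is
# illustrative, not part of Nova or Placement.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 94,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity:g} (max {inv['max_unit']} per allocation)")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400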
[ 1338.867295] env[68282]: INFO nova.scheduler.client.report [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Deleted allocations for instance b081435b-64e1-4baa-a634-a2f22a3d9a29 [ 1338.894606] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1160cd8d-8a61-429c-96c8-b1709c67b491 tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 607.553s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.895070] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 408.165s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.896571] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Acquiring lock "b081435b-64e1-4baa-a634-a2f22a3d9a29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.897301] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.897516] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.901028] env[68282]: INFO nova.compute.manager [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Terminating instance [ 1338.902622] env[68282]: DEBUG nova.compute.manager [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1338.902817] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1338.903865] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a179b81-c778-4179-bc5e-66f02777692c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.915787] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e12d0ce-4aca-4648-8645-dfccded54947 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.929220] env[68282]: DEBUG nova.compute.manager [None req-3c9f7227-8ef4-4bf9-8ffd-079555ee3f9d tempest-ImagesOneServerNegativeTestJSON-1930787394 tempest-ImagesOneServerNegativeTestJSON-1930787394-project-member] [instance: e06eb2ba-c8a8-41b8-89b8-2afb94abe501] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1338.952776] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b081435b-64e1-4baa-a634-a2f22a3d9a29 could not be found. [ 1338.952935] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1338.952976] env[68282]: INFO nova.compute.manager [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1338.953254] env[68282]: DEBUG oslo.service.loopingcall [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1338.953511] env[68282]: DEBUG nova.compute.manager [-] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1338.953610] env[68282]: DEBUG nova.network.neutron [-] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1338.958103] env[68282]: DEBUG nova.compute.manager [None req-3c9f7227-8ef4-4bf9-8ffd-079555ee3f9d tempest-ImagesOneServerNegativeTestJSON-1930787394 tempest-ImagesOneServerNegativeTestJSON-1930787394-project-member] [instance: e06eb2ba-c8a8-41b8-89b8-2afb94abe501] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1338.993657] env[68282]: DEBUG oslo_concurrency.lockutils [None req-3c9f7227-8ef4-4bf9-8ffd-079555ee3f9d tempest-ImagesOneServerNegativeTestJSON-1930787394 tempest-ImagesOneServerNegativeTestJSON-1930787394-project-member] Lock "e06eb2ba-c8a8-41b8-89b8-2afb94abe501" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.452s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.995431] env[68282]: DEBUG nova.network.neutron [-] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.007028] env[68282]: DEBUG nova.compute.manager [None req-5cf49791-98e3-4e5e-8a18-024b14007d33 tempest-ServersTestJSON-1523968152 tempest-ServersTestJSON-1523968152-project-member] [instance: 52507e06-547b-4fbe-8689-cf497332c7de] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1339.010504] env[68282]: INFO nova.compute.manager [-] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] Took 0.06 seconds to deallocate network for instance. [ 1339.045129] env[68282]: DEBUG nova.compute.manager [None req-5cf49791-98e3-4e5e-8a18-024b14007d33 tempest-ServersTestJSON-1523968152 tempest-ServersTestJSON-1523968152-project-member] [instance: 52507e06-547b-4fbe-8689-cf497332c7de] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1339.086519] env[68282]: DEBUG oslo_concurrency.lockutils [None req-5cf49791-98e3-4e5e-8a18-024b14007d33 tempest-ServersTestJSON-1523968152 tempest-ServersTestJSON-1523968152-project-member] Lock "52507e06-547b-4fbe-8689-cf497332c7de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.516s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.101460] env[68282]: DEBUG nova.compute.manager [None req-c4b81251-101c-49a8-9fea-3fa8ce7e899a tempest-ServerActionsTestJSON-800717835 tempest-ServerActionsTestJSON-800717835-project-member] [instance: 8b6e3667-a6d8-4840-a849-7f4e26f93767] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1339.156686] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aeafd27e-e71d-422f-9c2f-a681863ebbcb tempest-ImagesNegativeTestJSON-1645862844 tempest-ImagesNegativeTestJSON-1645862844-project-member] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.262s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.158157] env[68282]: DEBUG nova.compute.manager [None req-c4b81251-101c-49a8-9fea-3fa8ce7e899a tempest-ServerActionsTestJSON-800717835 tempest-ServerActionsTestJSON-800717835-project-member] [instance: 8b6e3667-a6d8-4840-a849-7f4e26f93767] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1339.158635] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 55.561s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.158826] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b081435b-64e1-4baa-a634-a2f22a3d9a29] During sync_power_state the instance has a pending task (deleting). Skip. [ 1339.159010] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "b081435b-64e1-4baa-a634-a2f22a3d9a29" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.197540] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c4b81251-101c-49a8-9fea-3fa8ce7e899a tempest-ServerActionsTestJSON-800717835 tempest-ServerActionsTestJSON-800717835-project-member] Lock "8b6e3667-a6d8-4840-a849-7f4e26f93767" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.746s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.218780] env[68282]: DEBUG nova.compute.manager [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1339.295683] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.297028] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.297451] env[68282]: INFO nova.compute.claims [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1339.515458] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ac5909c8-cfb0-4df4-8e21-808adb7557e8 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "2f617973-033c-42e5-8451-2a565291d7c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.515458] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ac5909c8-cfb0-4df4-8e21-808adb7557e8 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "2f617973-033c-42e5-8451-2a565291d7c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.686151] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.849067] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b4d651-d539-4720-91b2-b599ee9e04da {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.860063] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dcc0344-7d6e-44c3-b35f-bccabe525c4e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.892525] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bfaa11-de63-48f0-b5b1-eed91b2adf54 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.899727] env[68282]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa61f66-75e4-46a3-9dd9-ab0ac9e21146 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.914874] env[68282]: DEBUG nova.compute.provider_tree [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.924638] env[68282]: DEBUG nova.scheduler.client.report [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1339.943801] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.648s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.944356] env[68282]: DEBUG nova.compute.manager [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1339.981142] env[68282]: DEBUG nova.compute.claims [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1339.981362] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.981591] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.620259] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a071c14-74f4-412c-9cc8-402dfa98886b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.629571] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f46e25-2c17-478e-abc5-5462f2a76e8d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.661412] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dc288f-c697-4a58-a95c-f24404c702a2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.669282] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32cbadc-20d1-4b07-89cd-5d67999bd5dc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.684739] env[68282]: DEBUG nova.compute.provider_tree [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1340.700428] env[68282]: DEBUG nova.scheduler.client.report [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1340.720716] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.739s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.724061] env[68282]: DEBUG nova.compute.utils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Conflict updating instance aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1340.725261] env[68282]: DEBUG nova.compute.manager [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Instance disappeared during build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1340.725583] env[68282]: DEBUG nova.compute.manager [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1340.725673] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "refresh_cache-aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.725855] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired lock "refresh_cache-aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.725988] env[68282]: DEBUG nova.network.neutron [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1340.801147] env[68282]: DEBUG nova.network.neutron [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1341.002977] env[68282]: DEBUG nova.network.neutron [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.018093] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Releasing lock "refresh_cache-aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.018716] env[68282]: DEBUG nova.compute.manager [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1341.018900] env[68282]: DEBUG nova.compute.manager [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1341.019390] env[68282]: DEBUG nova.network.neutron [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1341.060474] env[68282]: DEBUG nova.network.neutron [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1341.073042] env[68282]: DEBUG nova.network.neutron [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.086815] env[68282]: INFO nova.compute.manager [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Took 0.07 seconds to deallocate network for instance. 
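Note on the inventory reported twice above (at 1339.924638 and 1340.700428): placement derives usable capacity from exactly those fields, scaling total by allocation_ratio, subtracting reserved, and capping any single allocation at max_unit. A minimal sketch of that arithmetic using the numbers from the log; this is illustrative only, not Nova or placement source code.

```python
# Illustrative only: effective capacity implied by the inventory logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 94},
}

def effective_capacity(inv: dict) -> dict:
    # capacity = (total - reserved) * allocation_ratio; max_unit bounds one allocation
    return {
        rc: {
            'capacity': (v['total'] - v['reserved']) * v['allocation_ratio'],
            'max_single_allocation': v['max_unit'],
        }
        for rc, v in inv.items()
    }

print(effective_capacity(inventory))
# VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0
```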
[ 1341.245669] env[68282]: INFO nova.scheduler.client.report [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Deleted allocations for instance aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6 [ 1341.245771] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f029471-4fb9-4306-b1b8-12a28aec0221 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.975s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.247113] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.561s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.247249] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.247780] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.247780] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.249351] env[68282]: INFO nova.compute.manager [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Terminating instance [ 1341.254708] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "refresh_cache-aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.254880] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired lock "refresh_cache-aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" {{(pid=68282) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.255065] env[68282]: DEBUG nova.network.neutron [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1341.264498] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1341.292639] env[68282]: DEBUG nova.network.neutron [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1341.323328] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.323589] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.325787] env[68282]: INFO nova.compute.claims [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1341.520164] env[68282]: DEBUG nova.network.neutron [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.535106] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Releasing lock "refresh_cache-aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.535581] env[68282]: DEBUG nova.compute.manager [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1341.535768] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1341.536351] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2d3020e5-4ae6-4ceb-bfad-ab3c7a51a2aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.549915] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4a62fd-f430-434b-bb98-510cb2b79333 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.583477] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6 could not be found. [ 1341.583704] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1341.583889] env[68282]: INFO nova.compute.manager [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1341.586488] env[68282]: DEBUG oslo.service.loopingcall [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1341.586488] env[68282]: DEBUG nova.compute.manager [-] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1341.586488] env[68282]: DEBUG nova.network.neutron [-] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1341.686457] env[68282]: DEBUG nova.network.neutron [-] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1341.704143] env[68282]: DEBUG nova.network.neutron [-] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.718197] env[68282]: INFO nova.compute.manager [-] [instance: aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6] Took 0.13 seconds to deallocate network for instance. 
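The WARNING at 1341.583477 followed immediately by "Instance destroyed" shows the delete path treating a missing backing VM as already gone: the build was aborted by the concurrent delete, so there is nothing on the hypervisor to tear down, and cleanup of networks and allocations continues anyway. A hedged sketch of that tolerate-missing pattern; the exception name mirrors nova.exception.InstanceNotFound, but the helper and its callables are hypothetical, not the actual vmops.destroy implementation.

```python
# Sketch of the "missing VM counts as destroyed" pattern visible above.
import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy_vm(find_vm_ref, unregister_vm, instance_uuid):
    try:
        vm_ref = find_vm_ref(instance_uuid)   # e.g. a SearchIndex.FindAllByUuid lookup
        unregister_vm(vm_ref)                 # actual teardown on the backend
    except InstanceNotFound:
        # Nothing landed on the hypervisor, so treat the instance as already
        # destroyed and let network/allocation cleanup proceed regardless.
        LOG.warning("Instance %s does not exist on backend", instance_uuid)
```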
[ 1341.839252] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1a708cec-b277-410d-99d6-b21dc5462bf0 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "aefee2e9-5d20-4fb1-bfd5-c1c6e6fa53b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.592s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.855289] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a48e359-f045-4826-a92e-d627b0bc5cd6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.864484] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0074a0-5142-4e63-9955-df0fdb546e02 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.899898] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8bdc75-b359-4241-a582-cfe758a131cf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.907287] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449e8a7e-b239-477d-b7fa-2264ab2be2b6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.921294] env[68282]: DEBUG nova.compute.provider_tree [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.933152] env[68282]: DEBUG nova.scheduler.client.report [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1341.949288] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.625s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.949677] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1341.988055] env[68282]: DEBUG nova.compute.utils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1341.989354] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1341.989530] env[68282]: DEBUG nova.network.neutron [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1342.006371] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1342.099956] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1342.193232] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1342.193503] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1342.193668] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1342.193947] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1342.194102] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1342.194263] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1342.194604] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1342.194832] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1342.195035] env[68282]: DEBUG nova.virt.hardware [None 
req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1342.195221] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1342.195465] env[68282]: DEBUG nova.virt.hardware [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1342.196297] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a467c25-016b-4618-b773-c98e712232af {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.205421] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275012d8-305c-4239-a809-41366346e323 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.278419] env[68282]: DEBUG nova.policy [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1739c9a5271f4258b92aee5b031218d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95c697736cbe4520872cc8f1c143879b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1342.351953] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83809424-dbe7-4a75-af59-46874a52cc7d tempest-ServersNegativeTestMultiTenantJSON-1282295197 tempest-ServersNegativeTestMultiTenantJSON-1282295197-project-member] Acquiring lock "2d3581e8-19f6-4665-9b3c-a89dfdefb166" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.352217] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83809424-dbe7-4a75-af59-46874a52cc7d tempest-ServersNegativeTestMultiTenantJSON-1282295197 tempest-ServersNegativeTestMultiTenantJSON-1282295197-project-member] Lock "2d3581e8-19f6-4665-9b3c-a89dfdefb166" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.465334] env[68282]: DEBUG nova.network.neutron [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Successfully created port: 
b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1343.744659] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc904dc4-8fbc-47d5-a3dd-3d2d2ed24e19 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "567cf5f0-3e42-4f75-8b8d-978220c161d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.744894] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc904dc4-8fbc-47d5-a3dd-3d2d2ed24e19 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "567cf5f0-3e42-4f75-8b8d-978220c161d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.998393] env[68282]: DEBUG nova.compute.manager [req-d8c26a87-8571-4568-9e8e-f804721860ab req-386a506b-c8ec-4f7c-af76-cc108d5e382c service nova] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Received event network-vif-plugged-b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1344.998652] env[68282]: DEBUG oslo_concurrency.lockutils [req-d8c26a87-8571-4568-9e8e-f804721860ab req-386a506b-c8ec-4f7c-af76-cc108d5e382c service nova] Acquiring lock "4340e67d-0b82-4f16-8c49-88886a57523f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.998866] env[68282]: DEBUG oslo_concurrency.lockutils [req-d8c26a87-8571-4568-9e8e-f804721860ab req-386a506b-c8ec-4f7c-af76-cc108d5e382c service nova] Lock "4340e67d-0b82-4f16-8c49-88886a57523f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.999054] env[68282]: DEBUG oslo_concurrency.lockutils [req-d8c26a87-8571-4568-9e8e-f804721860ab req-386a506b-c8ec-4f7c-af76-cc108d5e382c service nova] Lock "4340e67d-0b82-4f16-8c49-88886a57523f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.999277] env[68282]: DEBUG nova.compute.manager [req-d8c26a87-8571-4568-9e8e-f804721860ab req-386a506b-c8ec-4f7c-af76-cc108d5e382c service nova] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] No waiting events found dispatching network-vif-plugged-b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1344.999393] env[68282]: WARNING nova.compute.manager [req-d8c26a87-8571-4568-9e8e-f804721860ab req-386a506b-c8ec-4f7c-af76-cc108d5e382c service nova] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Received unexpected event network-vif-plugged-b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae for instance with vm_state building and task_state spawning. 
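The recurring "Acquiring lock X by Y / Lock X acquired :: waited N s / Lock X released :: held N s" triples throughout this trace (for example the "...-events" lock at 1344.998652) come from oslo.concurrency's lock helpers, which log wait and hold times around the guarded section. A minimal usage sketch follows; the lock names mirror the log, while the function bodies are placeholders rather than the real compute manager code.

```python
# Minimal oslo.concurrency usage that produces "Acquiring/acquired/released"
# lock lines like the ones above; the guarded bodies are hypothetical.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance):
    # While this runs, other claims/aborts on the node block; the
    # "waited"/"held" timings in the log measure exactly that contention.
    pass

def pop_instance_event(instance_uuid, event_name):
    with lockutils.lock('%s-events' % instance_uuid):
        # Pop any waiter registered for this (instance, event) pair; if none
        # exists yet, the event is logged as "unexpected", as seen above.
        pass
```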
[ 1345.059018] env[68282]: DEBUG nova.network.neutron [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Successfully updated port: b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1345.069890] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquiring lock "refresh_cache-4340e67d-0b82-4f16-8c49-88886a57523f" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.070197] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquired lock "refresh_cache-4340e67d-0b82-4f16-8c49-88886a57523f" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.070424] env[68282]: DEBUG nova.network.neutron [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1345.143588] env[68282]: DEBUG nova.network.neutron [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1345.312052] env[68282]: DEBUG nova.network.neutron [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Updating instance_info_cache with network_info: [{"id": "b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae", "address": "fa:16:3e:89:87:ab", "network": {"id": "8076d7cb-fc9d-41c1-adca-1523ecc03f80", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1749812871-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95c697736cbe4520872cc8f1c143879b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d72eb9-c4", "ovs_interfaceid": "b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.327145] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Releasing lock "refresh_cache-4340e67d-0b82-4f16-8c49-88886a57523f" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.327456] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Instance network_info: |[{"id": "b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae", "address": "fa:16:3e:89:87:ab", "network": {"id": "8076d7cb-fc9d-41c1-adca-1523ecc03f80", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1749812871-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95c697736cbe4520872cc8f1c143879b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d72eb9-c4", "ovs_interfaceid": "b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1345.327894] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:87:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13d625c9-77ec-4edb-a56b-9f37a314cc39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1345.336098] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Creating folder: Project (95c697736cbe4520872cc8f1c143879b). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1345.336645] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecf9b4df-9789-4300-af24-72b914e3f7e9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.346828] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Created folder: Project (95c697736cbe4520872cc8f1c143879b) in parent group-v693573. [ 1345.347016] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Creating folder: Instances. Parent ref: group-v693643. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1345.347255] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9709994b-6122-41dd-9a5e-f337bc4236d4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.355812] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Created folder: Instances in parent group-v693643. [ 1345.356062] env[68282]: DEBUG oslo.service.loopingcall [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.356251] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1345.356452] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea6252b0-be43-4982-93cb-0437fc7037d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.379489] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1345.379489] env[68282]: value = "task-3470541" [ 1345.379489] env[68282]: _type = "Task" [ 1345.379489] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.389704] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470541, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.888537] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470541, 'name': CreateVM_Task, 'duration_secs': 0.294862} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.888724] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1345.910383] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.910383] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.910383] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1345.910383] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63203e8a-323a-4f8e-9493-daa6d3451a24 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.915332] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Waiting for the task: (returnval){ [ 1345.915332] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52944b9a-1b1c-88e9-cb2a-6e11f73e2b4b" [ 1345.915332] env[68282]: _type = "Task" [ 1345.915332] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.924622] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52944b9a-1b1c-88e9-cb2a-6e11f73e2b4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.265987] env[68282]: DEBUG oslo_concurrency.lockutils [None req-359d61b7-53ef-4095-92c2-958fe87633ad tempest-ServerRescueTestJSONUnderV235-1077845589 tempest-ServerRescueTestJSONUnderV235-1077845589-project-member] Acquiring lock "b112af96-eff4-4b26-9161-deb9ab41afb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.266316] env[68282]: DEBUG oslo_concurrency.lockutils [None req-359d61b7-53ef-4095-92c2-958fe87633ad tempest-ServerRescueTestJSONUnderV235-1077845589 tempest-ServerRescueTestJSONUnderV235-1077845589-project-member] Lock "b112af96-eff4-4b26-9161-deb9ab41afb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.425231] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.425525] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1346.425757] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.023996] env[68282]: DEBUG nova.compute.manager [req-52a22655-b8ca-4fcc-a2c9-8adf82e6bf8c req-d242817b-aa85-4de9-8b2c-321934ec113f service nova] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Received event network-changed-b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1347.024264] env[68282]: DEBUG nova.compute.manager [req-52a22655-b8ca-4fcc-a2c9-8adf82e6bf8c req-d242817b-aa85-4de9-8b2c-321934ec113f service nova] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Refreshing instance network info cache due to event network-changed-b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1347.024607] env[68282]: DEBUG oslo_concurrency.lockutils [req-52a22655-b8ca-4fcc-a2c9-8adf82e6bf8c req-d242817b-aa85-4de9-8b2c-321934ec113f service nova] Acquiring lock "refresh_cache-4340e67d-0b82-4f16-8c49-88886a57523f" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.024607] env[68282]: DEBUG oslo_concurrency.lockutils [req-52a22655-b8ca-4fcc-a2c9-8adf82e6bf8c req-d242817b-aa85-4de9-8b2c-321934ec113f service nova] Acquired lock "refresh_cache-4340e67d-0b82-4f16-8c49-88886a57523f" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.024750] env[68282]: DEBUG nova.network.neutron [req-52a22655-b8ca-4fcc-a2c9-8adf82e6bf8c req-d242817b-aa85-4de9-8b2c-321934ec113f service nova] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Refreshing network info cache for port b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1347.442299] env[68282]: DEBUG nova.network.neutron [req-52a22655-b8ca-4fcc-a2c9-8adf82e6bf8c req-d242817b-aa85-4de9-8b2c-321934ec113f service nova] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Updated VIF entry in instance network info cache for port b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1347.442674] env[68282]: DEBUG nova.network.neutron [req-52a22655-b8ca-4fcc-a2c9-8adf82e6bf8c req-d242817b-aa85-4de9-8b2c-321934ec113f service nova] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Updating instance_info_cache with network_info: [{"id": "b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae", "address": "fa:16:3e:89:87:ab", "network": {"id": "8076d7cb-fc9d-41c1-adca-1523ecc03f80", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1749812871-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95c697736cbe4520872cc8f1c143879b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d72eb9-c4", "ovs_interfaceid": "b9d72eb9-c4f1-4125-a2e7-1f48475ec2ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.452478] env[68282]: DEBUG oslo_concurrency.lockutils [req-52a22655-b8ca-4fcc-a2c9-8adf82e6bf8c req-d242817b-aa85-4de9-8b2c-321934ec113f service nova] Releasing lock "refresh_cache-4340e67d-0b82-4f16-8c49-88886a57523f" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1351.129704] env[68282]: DEBUG oslo_concurrency.lockutils [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] 
Acquiring lock "4340e67d-0b82-4f16-8c49-88886a57523f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.438254] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f7d6e8c-2388-4a18-aaa3-7469cebe4681 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] Acquiring lock "18588d7f-1748-4e42-b91a-83edda89e6ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.438492] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f7d6e8c-2388-4a18-aaa3-7469cebe4681 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] Lock "18588d7f-1748-4e42-b91a-83edda89e6ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.019183] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1d73ac8f-5ef4-415d-aa76-cca32895dfc8 tempest-ServerAddressesTestJSON-877632188 tempest-ServerAddressesTestJSON-877632188-project-member] Acquiring lock "e0393ce7-f7d9-470d-8941-4a0ef876202d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.019835] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1d73ac8f-5ef4-415d-aa76-cca32895dfc8 tempest-ServerAddressesTestJSON-877632188 tempest-ServerAddressesTestJSON-877632188-project-member] Lock "e0393ce7-f7d9-470d-8941-4a0ef876202d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.087422] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1375.101272] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.101436] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.101631] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.101789] env[68282]: DEBUG nova.compute.resource_tracker 
[None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1375.102940] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64348127-868a-4d57-944e-6c5d3a264539 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.111820] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283c422a-a1bd-4e61-a6dd-75faed9369cc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.127355] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837e2a1c-17c3-40bd-982b-edd347aaeca3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.133828] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ab5d66-6322-4055-9943-de23f1c35de5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.162582] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180919MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1375.162741] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.162933] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.236658] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.236825] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 50234924-2933-4a79-9a33-3cb968b6e08a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.237028] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.237165] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.237288] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.237407] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.237522] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.237639] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.237775] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.237891] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1375.250363] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.260692] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance f1e39c16-6bf4-4b22-9bac-b82d176df7f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.270121] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.280399] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.290398] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7c17f658-a502-4e35-a4d4-5b8e37da47c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.299615] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a7576467-3848-400d-925f-0a1a070dbf07 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.309899] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 81d2e3d5-2b11-4c9a-93eb-16e6929aada0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.319117] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.328110] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.337455] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.347146] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 56b8d673-b69b-4f07-9c35-12c9390ed505 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.360116] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2f617973-033c-42e5-8451-2a565291d7c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.369632] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2d3581e8-19f6-4665-9b3c-a89dfdefb166 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.379575] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 567cf5f0-3e42-4f75-8b8d-978220c161d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.388631] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b112af96-eff4-4b26-9161-deb9ab41afb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.401862] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 18588d7f-1748-4e42-b91a-83edda89e6ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.411942] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e0393ce7-f7d9-470d-8941-4a0ef876202d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1375.412215] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1375.412349] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1375.690013] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1148a706-ff26-4b13-9580-6ed9732d107a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.697703] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2691685-5846-452b-b966-4787c095fe18 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.727029] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121f8b8f-9fc3-4f29-87ee-071159d23968 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.733253] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e224e008-3e1b-4d64-818b-94b33d709892 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.745811] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.754218] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1375.769397] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1375.769588] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.607s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.770990] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.770990] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1381.770990] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1381.793087] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.793270] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.793407] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.793537] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.793664] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.793789] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.793911] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.796719] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.797338] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.797338] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1381.797338] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1382.087075] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.087075] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.087280] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1383.083850] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.132739] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.132972] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.419080] env[68282]: WARNING oslo_vmware.rw_handles [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1383.419080] env[68282]: ERROR oslo_vmware.rw_handles [ 1383.419603] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1383.421353] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1383.421559] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd 
tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Copying Virtual Disk [datastore2] vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/2dbc79ae-a0d5-4efe-a942-bfc9e0991372/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1383.421847] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1482b261-642b-493e-b568-d1bc749157aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.430170] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Waiting for the task: (returnval){ [ 1383.430170] env[68282]: value = "task-3470542" [ 1383.430170] env[68282]: _type = "Task" [ 1383.430170] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.437893] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Task: {'id': task-3470542, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.941057] env[68282]: DEBUG oslo_vmware.exceptions [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1383.941057] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.941413] env[68282]: ERROR nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1383.941413] env[68282]: Faults: ['InvalidArgument'] [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Traceback (most recent call last): [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] yield resources [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] self.driver.spawn(context, instance, image_meta, [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] self._fetch_image_if_missing(context, vi) [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] image_cache(vi, tmp_image_ds_loc) [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] vm_util.copy_virtual_disk( [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] session._wait_for_task(vmdk_copy_task) [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] return self.wait_for_task(task_ref) [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] return evt.wait() [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] result = hub.switch() [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] return self.greenlet.switch() [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] self.f(*self.args, **self.kw) [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] raise exceptions.translate_fault(task_info.error) [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Faults: ['InvalidArgument'] [ 1383.941413] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] [ 1383.943257] env[68282]: INFO nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Terminating instance [ 1383.943330] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.943592] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1383.944186] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd 
tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1383.945167] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1383.945167] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-341c673b-9ab9-48e4-879f-ad67b0cfbc05 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.947290] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa86784-9d9b-4d5b-868f-aec6a5e44507 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.954374] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1383.954601] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-109142c0-c917-405a-8421-6f3fbfece07d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.956822] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1383.956998] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1383.957931] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-260c274a-0fc3-4e8b-b81f-beb3826e8d1d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.963418] env[68282]: DEBUG oslo_vmware.api [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Waiting for the task: (returnval){ [ 1383.963418] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52bbbca6-67b9-abab-8c59-84b0cc3055a4" [ 1383.963418] env[68282]: _type = "Task" [ 1383.963418] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.970657] env[68282]: DEBUG oslo_vmware.api [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52bbbca6-67b9-abab-8c59-84b0cc3055a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.023025] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1384.023290] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1384.023517] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Deleting the datastore file [datastore2] 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1384.023794] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4541cbd8-e343-4c82-a104-f7f204c26f26 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.030519] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Waiting for the task: (returnval){ [ 1384.030519] env[68282]: value = "task-3470544" [ 1384.030519] env[68282]: _type = "Task" [ 1384.030519] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.038691] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Task: {'id': task-3470544, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.088194] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.088194] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.473342] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1384.473598] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Creating directory with path [datastore2] vmware_temp/9a3039ff-ec06-491d-812e-c23074d1fd16/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1384.473824] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-281dc35c-0807-4835-a986-96b787cd3476 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.485421] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Created directory with path [datastore2] vmware_temp/9a3039ff-ec06-491d-812e-c23074d1fd16/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1384.485566] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Fetch image to [datastore2] vmware_temp/9a3039ff-ec06-491d-812e-c23074d1fd16/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1384.485724] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/9a3039ff-ec06-491d-812e-c23074d1fd16/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1384.486429] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bd2c54-6860-40fe-99d7-44fe6e8208f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.492762] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-2cac177a-c7ce-4e2e-8009-6e6e1e75aca6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.501314] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5653e6-9cdc-48bd-87ad-a9207ebd06d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.534436] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ff242d-b1ff-4d65-a464-0f288e1e0aba {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.541087] env[68282]: DEBUG oslo_vmware.api [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Task: {'id': task-3470544, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069744} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.542475] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1384.542681] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1384.542870] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1384.543059] env[68282]: INFO nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Took 0.60 seconds to destroy the instance on the hypervisor. 
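The destroy sequence recorded above (Invoking FileManager.DeleteDatastoreFile_Task, "Waiting for the task", progress polling, "completed successfully") is the standard oslo.vmware pattern Nova uses for every vCenter operation: invoke a task-returning SOAP method through the API session, then block on wait_for_task() until vCenter reports success or a fault. A minimal sketch of that pattern follows; it is not taken from this log, and the host, credentials, datastore path, and the omitted datacenter reference are all hypothetical placeholders.

    from oslo_vmware import api

    # Hypothetical vCenter endpoint and credentials; real values are site-specific.
    session = api.VMwareAPISession(
        'vc.example.test', 'nova-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # The FileManager managed object hangs off the service content, matching the
    # FileManager.DeleteDatastoreFile_Task invocation seen in the records above.
    file_manager = session.vim.service_content.fileManager

    # A real call also needs the Datacenter managed-object ref for the datastore;
    # Nova looks it up from the datastore, it is omitted here for brevity.
    datacenter_ref = None

    # Invoke the task-returning method, then block until the task completes.
    # wait_for_task() polls the task object and raises a translated exception
    # (e.g. VimFaultException) if vCenter reports a fault.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] some-instance-dir',  # hypothetical datastore path
        datacenter=datacenter_ref)
    session.wait_for_task(task)

The InvalidArgument fault in the earlier CopyVirtualDisk_Task traceback surfaces through this same polling path (_poll_task in oslo_vmware/api.py raises translate_fault on the task error), which is why it reaches the compute manager as a VimFaultException rather than as a raw SOAP fault.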
[ 1384.545071] env[68282]: DEBUG nova.compute.claims [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1384.545248] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.545457] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.547936] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-57f9ba1b-36c2-415b-9061-b2d628893f22 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.570527] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1384.777184] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.778797] env[68282]: ERROR nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = getattr(controller, method)(*args, **kwargs) [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._get(image_id) [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] resp, body = self.http_client.get(url, headers=header) [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.request(url, 'GET', **kwargs) [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._handle_response(resp) [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exc.from_response(resp, resp.content) [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] During handling of the above exception, another exception occurred: [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] yield resources [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self.driver.spawn(context, instance, image_meta, [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1384.778797] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._fetch_image_if_missing(context, vi) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image_fetch(context, vi, tmp_image_ds_loc) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] images.fetch_image( [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] metadata = IMAGE_API.get(context, image_ref) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return session.show(context, image_id, [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] _reraise_translated_image_exception(image_id) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise new_exc.with_traceback(exc_trace) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = getattr(controller, method)(*args, **kwargs) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._get(image_id) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] resp, body = self.http_client.get(url, headers=header) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.request(url, 'GET', **kwargs) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._handle_response(resp) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exc.from_response(resp, resp.content) [ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1384.779954] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1384.779954] env[68282]: INFO nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Terminating instance [ 1384.781485] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.781485] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1384.781485] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91aad542-217f-4d77-acd6-eec513410a27 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.783708] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1384.783903] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1384.786632] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b84191-2e76-43c6-8b3d-926cf8991905 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.793574] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1384.793787] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e76d0b98-8e1f-4285-8d04-4068b179fb75 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.796062] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1384.796238] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 
tempest-ServersAdmin275Test-1535174061-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1384.797318] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5783ebab-cfb6-4c66-8118-e2d580424776 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.804080] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Waiting for the task: (returnval){ [ 1384.804080] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52c6c520-4d69-fb45-68da-fe8ca70fd5d1" [ 1384.804080] env[68282]: _type = "Task" [ 1384.804080] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.812282] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52c6c520-4d69-fb45-68da-fe8ca70fd5d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.862296] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1384.862581] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1384.862700] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Deleting the datastore file [datastore2] 50234924-2933-4a79-9a33-3cb968b6e08a {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1384.862967] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e005d401-5110-45f9-9d6b-b0567cd07dc2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.871548] env[68282]: DEBUG oslo_vmware.api [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Waiting for the task: (returnval){ [ 1384.871548] env[68282]: value = "task-3470546" [ 1384.871548] env[68282]: _type = "Task" [ 1384.871548] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.879046] env[68282]: DEBUG oslo_vmware.api [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Task: {'id': task-3470546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.925530] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c25e8a-8308-48c9-acce-c98d9cc84203 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.932622] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d98c793-e541-4792-9141-cc35e4f77cc8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.962940] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2986a8d2-7080-4100-8914-8f2498303742 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.969792] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b882f1-6d6e-434f-9743-91410e9aa2d4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.982312] env[68282]: DEBUG nova.compute.provider_tree [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1384.991761] env[68282]: DEBUG nova.scheduler.client.report [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1385.006619] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.461s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.007154] env[68282]: ERROR nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter 
was not correct: fileType [ 1385.007154] env[68282]: Faults: ['InvalidArgument'] [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Traceback (most recent call last): [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] self.driver.spawn(context, instance, image_meta, [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] self._fetch_image_if_missing(context, vi) [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] image_cache(vi, tmp_image_ds_loc) [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] vm_util.copy_virtual_disk( [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] session._wait_for_task(vmdk_copy_task) [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] return self.wait_for_task(task_ref) [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] return evt.wait() [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] result = hub.switch() [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] return self.greenlet.switch() [ 1385.007154] env[68282]: ERROR 
nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] self.f(*self.args, **self.kw) [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] raise exceptions.translate_fault(task_info.error) [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Faults: ['InvalidArgument'] [ 1385.007154] env[68282]: ERROR nova.compute.manager [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] [ 1385.008270] env[68282]: DEBUG nova.compute.utils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1385.009184] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Build of instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 was re-scheduled: A specified parameter was not correct: fileType [ 1385.009184] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1385.009559] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1385.009734] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1385.009938] env[68282]: DEBUG nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1385.010302] env[68282]: DEBUG nova.network.neutron [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1385.087256] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.314779] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1385.315072] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Creating directory with path [datastore2] vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1385.315315] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10af59c7-86c8-4755-9328-ef9aa3cc5c8c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.326300] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Created directory with path [datastore2] vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1385.326653] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Fetch image to [datastore2] vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1385.326653] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1385.327388] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504393e7-4338-43f9-b247-82421880ac7a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.338980] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ee457b-3f22-4af6-9222-b8fa4089e051 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.351577] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64ed1ab-7947-455c-8270-bd6647d4ad9c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.389749] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c1c1b8-7957-461e-aada-cf9cea4b7632 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.397418] env[68282]: DEBUG oslo_vmware.api [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Task: {'id': task-3470546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075815} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.398894] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1385.399100] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1385.399312] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1385.399449] env[68282]: INFO nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1385.401299] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e624d430-f17b-4608-b2f6-467a93575149 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.403216] env[68282]: DEBUG nova.compute.claims [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1385.403397] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.403610] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.434447] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1385.502153] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1385.566231] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1385.566499] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1385.730316] env[68282]: DEBUG nova.network.neutron [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.744375] env[68282]: INFO nova.compute.manager [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Took 0.73 seconds to deallocate network for instance. [ 1385.853691] env[68282]: INFO nova.scheduler.client.report [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Deleted allocations for instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 [ 1385.884947] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c6cf668e-d3b0-4dd5-a627-f0d16cd1a6dd tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 654.204s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.886234] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 454.626s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.886456] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Acquiring lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.886661] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.886832] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.888763] env[68282]: INFO nova.compute.manager [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Terminating instance [ 1385.890400] env[68282]: DEBUG nova.compute.manager [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1385.890597] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1385.891126] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08f2459d-e50c-4b09-a949-5cfc29928e64 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.896101] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20c6fbc-daec-469d-9422-ed129f14adfe {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.902237] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba24597-63c4-4112-aa36-496c5093bf7c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.915689] env[68282]: DEBUG nova.compute.manager [None req-975a55a9-f0f9-4f7e-8dd9-64feba3818f3 tempest-InstanceActionsTestJSON-1920713201 tempest-InstanceActionsTestJSON-1920713201-project-member] [instance: e86e97e0-c191-4f39-9e71-1e99dfbbe65f] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1385.919273] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a84f235-7e23-4122-ab94-bf3ee0591cce {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.959657] env[68282]: DEBUG nova.compute.manager [None req-975a55a9-f0f9-4f7e-8dd9-64feba3818f3 tempest-InstanceActionsTestJSON-1920713201 tempest-InstanceActionsTestJSON-1920713201-project-member] [instance: e86e97e0-c191-4f39-9e71-1e99dfbbe65f] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1385.961157] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86582124-0bbb-411c-9953-7b568eddc7e5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.966264] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0 could not be found. [ 1385.966264] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1385.966264] env[68282]: INFO nova.compute.manager [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Took 0.07 seconds to destroy the instance on the hypervisor. [ 1385.966264] env[68282]: DEBUG oslo.service.loopingcall [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1385.966264] env[68282]: DEBUG nova.compute.manager [-] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1385.966264] env[68282]: DEBUG nova.network.neutron [-] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1385.972063] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e836e57b-450e-4dd1-bd8a-2f2ffc625883 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.987686] env[68282]: DEBUG nova.compute.provider_tree [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1385.994143] env[68282]: DEBUG oslo_concurrency.lockutils [None req-975a55a9-f0f9-4f7e-8dd9-64feba3818f3 tempest-InstanceActionsTestJSON-1920713201 tempest-InstanceActionsTestJSON-1920713201-project-member] Lock "e86e97e0-c191-4f39-9e71-1e99dfbbe65f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.090s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.996484] env[68282]: DEBUG nova.scheduler.client.report [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1385.999630] env[68282]: DEBUG nova.network.neutron [-] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.004130] env[68282]: DEBUG nova.compute.manager [None req-9db6281d-c268-4088-b5b6-72adf5b7758f tempest-ServerActionsV293TestJSON-1576633023 tempest-ServerActionsV293TestJSON-1576633023-project-member] [instance: 340f09fe-88a8-4b55-bf56-771d9fe1a14a] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1386.010896] env[68282]: INFO nova.compute.manager [-] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] Took 0.05 seconds to deallocate network for instance. 
[ 1386.022152] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.618s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.022882] env[68282]: ERROR nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = getattr(controller, method)(*args, **kwargs) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._get(image_id) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] resp, body = self.http_client.get(url, headers=header) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.request(url, 'GET', **kwargs) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._handle_response(resp) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exc.from_response(resp, resp.content) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] During handling of the above exception, another exception occurred: [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self.driver.spawn(context, instance, image_meta, [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._fetch_image_if_missing(context, vi) [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1386.022882] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image_fetch(context, vi, tmp_image_ds_loc) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] images.fetch_image( [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] metadata = IMAGE_API.get(context, image_ref) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return session.show(context, image_id, [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 
50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] _reraise_translated_image_exception(image_id) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise new_exc.with_traceback(exc_trace) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = getattr(controller, method)(*args, **kwargs) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._get(image_id) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] resp, body = self.http_client.get(url, headers=header) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.request(url, 'GET', **kwargs) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._handle_response(resp) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exc.from_response(resp, resp.content) [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] nova.exception.ImageNotAuthorized: Not authorized for image 
658717f1-7b98-47ed-bf66-8ef1a68a7047. [ 1386.024417] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.024417] env[68282]: DEBUG nova.compute.utils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1386.025805] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Build of instance 50234924-2933-4a79-9a33-3cb968b6e08a was re-scheduled: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1386.025805] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1386.025805] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1386.025805] env[68282]: DEBUG nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1386.025805] env[68282]: DEBUG nova.network.neutron [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1386.030971] env[68282]: DEBUG nova.compute.manager [None req-9db6281d-c268-4088-b5b6-72adf5b7758f tempest-ServerActionsV293TestJSON-1576633023 tempest-ServerActionsV293TestJSON-1576633023-project-member] [instance: 340f09fe-88a8-4b55-bf56-771d9fe1a14a] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1386.056443] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9db6281d-c268-4088-b5b6-72adf5b7758f tempest-ServerActionsV293TestJSON-1576633023 tempest-ServerActionsV293TestJSON-1576633023-project-member] Lock "340f09fe-88a8-4b55-bf56-771d9fe1a14a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.668s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.069809] env[68282]: DEBUG nova.compute.manager [None req-e5f4dc5a-51be-4a09-ac0b-326da94af3a0 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] [instance: 70943b53-2303-4cf8-8e02-95cbf011454c] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1386.099123] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c8adae41-b27e-47ba-a778-0dd2375ecbf1 tempest-FloatingIPsAssociationTestJSON-1040675667 tempest-FloatingIPsAssociationTestJSON-1040675667-project-member] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.099982] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 102.502s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.100189] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5667b56a-9abd-4e9d-83cf-04be5c8ddbb0] During sync_power_state the instance has a pending task (deleting). Skip. [ 1386.100373] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "5667b56a-9abd-4e9d-83cf-04be5c8ddbb0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.101307] env[68282]: DEBUG nova.compute.manager [None req-e5f4dc5a-51be-4a09-ac0b-326da94af3a0 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] [instance: 70943b53-2303-4cf8-8e02-95cbf011454c] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1386.123205] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e5f4dc5a-51be-4a09-ac0b-326da94af3a0 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] Lock "70943b53-2303-4cf8-8e02-95cbf011454c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.584s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.137273] env[68282]: DEBUG nova.compute.manager [None req-c1751f96-f1e9-452f-bcac-a0b5f67ab02b tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] [instance: da3a24eb-1e73-4137-b54f-da8077968d78] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1386.166091] env[68282]: DEBUG nova.compute.manager [None req-c1751f96-f1e9-452f-bcac-a0b5f67ab02b tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] [instance: da3a24eb-1e73-4137-b54f-da8077968d78] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1386.168075] env[68282]: DEBUG neutronclient.v2_0.client [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68282) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1386.170565] env[68282]: ERROR nova.compute.manager [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
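The two 401s above come from different services: Glance rejected the image lookup (which is what triggered the reschedule), and now the Neutron call made during network cleanup is rejected as well. The frames at nova/network/neutron.py lines 196 and 204 in the chained traceback that follows show a wrapper around every neutronclient call that converts the client's Unauthorized into a Nova exception. A minimal sketch of that pattern, purely illustrative and not the actual Nova implementation:

# Illustrative sketch only; the real logic lives in nova/network/neutron.py
# (lines 196-212 in the tracebacks) and differs in detail.
from neutronclient.common import exceptions as neutron_client_exc

from nova import exception


def translate_neutron_unauthorized(call, is_admin_client=False):
    """Wrap a neutronclient call and convert 401s into Nova exceptions."""
    def wrapper(*args, **kwargs):
        try:
            return call(*args, **kwargs)
        except neutron_client_exc.Unauthorized:
            if is_admin_client:
                # The service credentials from nova.conf could not
                # authenticate against Keystone.
                raise exception.NeutronAdminCredentialConfigurationInvalid()
            # The user-scoped token was rejected, e.g. because the tempest
            # project that owned the request has already been torn down.
            raise exception.Unauthorized()
    return wrapper

In the chained traceback that follows, the non-admin path fires and nova.exception.Unauthorized is raised; later in the log the retried deallocation, which goes through the admin-scoped client, ends in NeutronAdminCredentialConfigurationInvalid instead.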
[ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = getattr(controller, method)(*args, **kwargs) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._get(image_id) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] resp, body = self.http_client.get(url, headers=header) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.request(url, 'GET', **kwargs) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._handle_response(resp) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exc.from_response(resp, resp.content) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] During handling of the above exception, another exception occurred: [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self.driver.spawn(context, instance, image_meta, [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._fetch_image_if_missing(context, vi) [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1386.170565] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image_fetch(context, vi, tmp_image_ds_loc) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] images.fetch_image( [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] metadata = IMAGE_API.get(context, image_ref) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return session.show(context, image_id, [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] _reraise_translated_image_exception(image_id) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise new_exc.with_traceback(exc_trace) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 
50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = getattr(controller, method)(*args, **kwargs) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._get(image_id) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] resp, body = self.http_client.get(url, headers=header) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.request(url, 'GET', **kwargs) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self._handle_response(resp) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exc.from_response(resp, resp.content) [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] During handling of the above exception, another exception occurred: [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._build_and_run_instance(context, instance, image, [ 1386.171630] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exception.RescheduledException( [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] nova.exception.RescheduledException: Build of instance 50234924-2933-4a79-9a33-3cb968b6e08a was re-scheduled: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] During handling of the above exception, another exception occurred: [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] exception_handler_v20(status_code, error_body) [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise client_exc(message=error_message, [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Neutron server returns request_ids: ['req-8cd1fc6f-3626-424e-9ca8-0dc9e0cc586b'] [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 
50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] During handling of the above exception, another exception occurred: [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._deallocate_network(context, instance, requested_networks) [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self.network_api.deallocate_for_instance( [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] data = neutron.list_ports(**search_opts) [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.list('ports', self.ports_path, retrieve_all, [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] for r in self._pagination(collection, path, **params): [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1386.172913] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] res = self.get(path, params=params) [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 
50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.retry_request("GET", action, body=body, [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.do_request(method, action, body=body, [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._handle_fault_response(status_code, replybody, resp) [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exception.Unauthorized() [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] nova.exception.Unauthorized: Not authorized. [ 1386.174167] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.191313] env[68282]: DEBUG oslo_concurrency.lockutils [None req-c1751f96-f1e9-452f-bcac-a0b5f67ab02b tempest-AttachInterfacesTestJSON-987951229 tempest-AttachInterfacesTestJSON-987951229-project-member] Lock "da3a24eb-1e73-4137-b54f-da8077968d78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.643s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.202964] env[68282]: DEBUG nova.compute.manager [None req-0c01a4cc-23cb-4e12-9af5-11e8da39ab24 tempest-ServerTagsTestJSON-396054325 tempest-ServerTagsTestJSON-396054325-project-member] [instance: d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1386.228762] env[68282]: DEBUG nova.compute.manager [None req-0c01a4cc-23cb-4e12-9af5-11e8da39ab24 tempest-ServerTagsTestJSON-396054325 tempest-ServerTagsTestJSON-396054325-project-member] [instance: d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1386.234958] env[68282]: INFO nova.scheduler.client.report [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Deleted allocations for instance 50234924-2933-4a79-9a33-3cb968b6e08a [ 1386.249925] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e2c56baa-f5ee-4c52-becf-6ad898005523 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "50234924-2933-4a79-9a33-3cb968b6e08a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.012s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.250903] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "50234924-2933-4a79-9a33-3cb968b6e08a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.072s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.251151] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Acquiring lock "50234924-2933-4a79-9a33-3cb968b6e08a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.251363] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "50234924-2933-4a79-9a33-3cb968b6e08a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.251531] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "50234924-2933-4a79-9a33-3cb968b6e08a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.253807] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0c01a4cc-23cb-4e12-9af5-11e8da39ab24 tempest-ServerTagsTestJSON-396054325 tempest-ServerTagsTestJSON-396054325-project-member] Lock "d54b8c08-eb7a-447c-bd10-c5d6ceeb7c5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.159s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.255152] env[68282]: INFO nova.compute.manager [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Terminating instance [ 1386.256859] env[68282]: DEBUG nova.compute.manager [None 
req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1386.257102] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1386.257593] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01ea7ca2-a637-4dae-9eba-75be68fc5bec {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.261874] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1386.265604] env[68282]: DEBUG nova.compute.manager [None req-19de0116-ef92-437e-ba8b-3ed8543086d3 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: f1e39c16-6bf4-4b22-9bac-b82d176df7f9] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1386.270828] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06406892-8d1d-4093-a6ef-ce98ab81ee7f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.288818] env[68282]: DEBUG nova.compute.manager [None req-19de0116-ef92-437e-ba8b-3ed8543086d3 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: f1e39c16-6bf4-4b22-9bac-b82d176df7f9] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1386.299471] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 50234924-2933-4a79-9a33-3cb968b6e08a could not be found. [ 1386.299471] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1386.299471] env[68282]: INFO nova.compute.manager [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Took 0.04 seconds to destroy the instance on the hypervisor. 
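The destroy path above asks vCenter for the backing VM via SearchIndex.FindAllByUuid, gets nothing back, logs InstanceNotFound, marks the instance destroyed and reports the hypervisor teardown as taking 0.04 seconds. For reference, the same lookup can be reproduced with oslo.vmware directly; the connection values below are placeholders, and only the SOAP method name, its arguments and the instance UUID are taken from the log:

# Placeholder host and credentials; a standalone sketch, not the Nova driver.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    host='vcenter.example.com',       # placeholder
    server_username='administrator',  # placeholder
    server_password='secret',         # placeholder
    api_retry_count=3,
    task_poll_interval=0.5)

search_index = session.vim.service_content.searchIndex
# Ask vCenter for any VM whose instanceUuid matches the Nova instance UUID.
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid', search_index,
    uuid='50234924-2933-4a79-9a33-3cb968b6e08a',
    vmSearch=True, instanceUuid=True)
if not vm_refs:
    # This is the case logged above: the backing VM is already gone, so the
    # driver treats the destroy on the hypervisor as a no-op.
    print('instance not found on backend')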
[ 1386.299694] env[68282]: DEBUG oslo.service.loopingcall [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.299906] env[68282]: DEBUG nova.compute.manager [-] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1386.300011] env[68282]: DEBUG nova.network.neutron [-] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1386.317603] env[68282]: DEBUG oslo_concurrency.lockutils [None req-19de0116-ef92-437e-ba8b-3ed8543086d3 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "f1e39c16-6bf4-4b22-9bac-b82d176df7f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.255s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.320467] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.320467] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.321577] env[68282]: INFO nova.compute.claims [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1386.328221] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1386.387307] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.451147] env[68282]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68282) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1386.451409] env[68282]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-aeb098bb-bff7-4835-a7dc-1433b98dbdd0'] [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1386.452140] env[68282]: ERROR 
oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1386.452140] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1386.453884] env[68282]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1386.453884] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1386.453884] 
env[68282]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1386.453884] env[68282]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1386.453884] env[68282]: ERROR oslo.service.loopingcall [ 1386.453884] env[68282]: ERROR nova.compute.manager [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1386.487059] env[68282]: ERROR nova.compute.manager [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] exception_handler_v20(status_code, error_body) [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise client_exc(message=error_message, [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Neutron server returns request_ids: ['req-aeb098bb-bff7-4835-a7dc-1433b98dbdd0'] [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] During handling of the above exception, another exception occurred: [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Traceback (most recent call last): [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in 
do_terminate_instance [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._delete_instance(context, instance, bdms) [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._shutdown_instance(context, instance, bdms) [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._try_deallocate_network(context, instance, requested_networks) [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] with excutils.save_and_reraise_exception(): [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self.force_reraise() [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise self.value [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] _deallocate_network_with_retries() [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return evt.wait() [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1386.487059] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = hub.switch() [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.greenlet.switch() [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = func(*self.args, **self.kw) [ 1386.488228] env[68282]: ERROR 
nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] result = f(*args, **kwargs) [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._deallocate_network( [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self.network_api.deallocate_for_instance( [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] data = neutron.list_ports(**search_opts) [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.list('ports', self.ports_path, retrieve_all, [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] for r in self._pagination(collection, path, **params): [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] res = self.get(path, params=params) [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1386.488228] env[68282]: ERROR 
nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.retry_request("GET", action, body=body, [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] return self.do_request(method, action, body=body, [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] ret = obj(*args, **kwargs) [ 1386.488228] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1386.489248] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] self._handle_fault_response(status_code, replybody, resp) [ 1386.489248] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1386.489248] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1386.489248] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1386.489248] env[68282]: ERROR nova.compute.manager [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] [ 1386.522364] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Lock "50234924-2933-4a79-9a33-3cb968b6e08a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.271s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.523568] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "50234924-2933-4a79-9a33-3cb968b6e08a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 102.925s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.523751] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] During sync_power_state the instance has a pending task (deleting). Skip. 
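At this point the retried deallocation has failed the same way through the admin-scoped Neutron client, the wrapper at nova/network/neutron.py line 212 raised NeutronAdminCredentialConfigurationInvalid, and the instance was left in ERROR with its task state later reverted. The error text points at the Neutron admin credentials in nova.conf; a quick way to exercise those credentials outside Nova is to request a token with the same keystoneauth1 stack that appears in the tracebacks. Everything below is a placeholder except the library calls:

# All values are placeholders; substitute the [neutron] section of nova.conf.
from keystoneauth1 import loading
from keystoneauth1 import session as ks_session

auth = loading.get_plugin_loader('password').load_from_options(
    auth_url='http://controller/identity/v3',  # placeholder
    username='neutron',                        # placeholder
    password='secret',                         # placeholder
    project_name='service',                    # placeholder
    user_domain_name='Default',
    project_domain_name='Default')

sess = ks_session.Session(auth=auth)
# A 401 here (keystoneauth1 raises Unauthorized) reproduces the failure that
# Nova reports above as NeutronAdminCredentialConfigurationInvalid.
print(sess.get_token())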
[ 1386.523934] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "50234924-2933-4a79-9a33-3cb968b6e08a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.593023] env[68282]: INFO nova.compute.manager [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] [instance: 50234924-2933-4a79-9a33-3cb968b6e08a] Successfully reverted task state from None on failure for instance. [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server [None req-b8e4dbee-7638-4800-92a4-200c5d7c3d51 tempest-ListImageFiltersTestJSON-887310286 tempest-ListImageFiltersTestJSON-887310286-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-aeb098bb-bff7-4835-a7dc-1433b98dbdd0'] [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1386.601686] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1386.603225] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1386.604970] 
env[68282]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1386.604970] env[68282]: ERROR oslo_messaging.rpc.server [ 1386.708081] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb699b3d-8dd0-481d-b6a1-e8d0c27f251b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.716309] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f79328-1374-42f6-93df-518da7c3df04 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.754727] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd192556-024c-449a-a5ac-cf3bb07db031 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.762316] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e5aeac-379e-49a8-9be9-86842c73dba0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.775736] env[68282]: DEBUG nova.compute.provider_tree [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1386.785067] env[68282]: DEBUG nova.scheduler.client.report [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1386.799425] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.480s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.799955] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1386.805280] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.415s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.805280] env[68282]: INFO nova.compute.claims [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1386.854105] env[68282]: DEBUG nova.compute.utils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1386.855382] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Not allocating networking since 'none' was specified. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1386.870382] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1386.943792] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1386.979248] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1386.979248] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1386.979248] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1386.979248] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1386.979572] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1386.979572] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1386.979643] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1386.979855] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1386.979961] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d 
tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1386.980234] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1386.980467] env[68282]: DEBUG nova.virt.hardware [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1386.981727] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17872e4-2816-4e11-a9b2-e2c05e7c4245 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.992274] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6829189d-e1c3-4b37-9e8d-a718c7e1ab94 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.010053] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Instance VIF info [] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1387.014801] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Creating folder: Project (9ee4738ed0f84a1ab2b773584ffdfa53). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1387.015161] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74741dd7-7378-49b2-95d7-f2b54a17b8ba {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.026415] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Created folder: Project (9ee4738ed0f84a1ab2b773584ffdfa53) in parent group-v693573. [ 1387.026626] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Creating folder: Instances. Parent ref: group-v693646. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1387.026958] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-434fe9c7-40e2-4c95-8657-f7027f803910 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.038450] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Created folder: Instances in parent group-v693646. 
[ 1387.038712] env[68282]: DEBUG oslo.service.loopingcall [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1387.038906] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1387.039136] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-286e77ba-1f2d-404f-9f12-d79c6f34d16a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.059328] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1387.059328] env[68282]: value = "task-3470549" [ 1387.059328] env[68282]: _type = "Task" [ 1387.059328] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.066680] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470549, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.220114] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9524ab-1a01-4333-b384-bb4ea8e6a4b9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.228555] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17745b1-8977-4d44-8f63-eff736dcd4ef {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.258495] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be8fcd2-1b5f-44a0-abc8-5ec71aefc521 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.266142] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe37e7c-fbc5-42f6-98a4-ce1d30bebd23 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.279457] env[68282]: DEBUG nova.compute.provider_tree [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.288540] env[68282]: DEBUG nova.scheduler.client.report [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1387.305305] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.503s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.305864] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1387.350482] env[68282]: DEBUG nova.compute.utils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1387.351852] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Not allocating networking since 'none' was specified. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1387.360560] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1387.421762] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1387.449660] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1387.449930] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1387.450111] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1387.450299] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1387.450450] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1387.450599] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1387.450802] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1387.450963] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1387.451230] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 
tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1387.451406] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1387.451579] env[68282]: DEBUG nova.virt.hardware [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1387.452442] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821617d2-ef6b-4e58-b3ed-b41741d575f0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.460634] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83caea1c-eae1-45b1-86ac-a76439579aa1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.473995] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Instance VIF info [] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1387.479850] env[68282]: DEBUG oslo.service.loopingcall [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1387.480090] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1387.480393] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-29da87d8-2a9c-4056-aa1d-e5f9dd40bf08 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.496161] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1387.496161] env[68282]: value = "task-3470550" [ 1387.496161] env[68282]: _type = "Task" [ 1387.496161] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.503304] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470550, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.569144] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470549, 'name': CreateVM_Task, 'duration_secs': 0.288052} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.571273] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1387.571273] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.571273] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.571273] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1387.571273] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a85f977-fe8c-4dcc-839e-bbb1615fc74b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.575050] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for the task: (returnval){ [ 1387.575050] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]520db40f-8faf-e36a-9744-6f59032a277d" [ 1387.575050] env[68282]: _type = "Task" [ 1387.575050] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.582759] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]520db40f-8faf-e36a-9744-6f59032a277d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.006659] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470550, 'name': CreateVM_Task, 'duration_secs': 0.287223} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.006849] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1388.007278] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.087610] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.087896] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1388.088359] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.088359] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.091019] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1388.091019] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d3768b3-8707-45b5-b47b-82dd5e2475ad {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.094880] env[68282]: DEBUG oslo_vmware.api [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for the task: (returnval){ [ 1388.094880] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]525e974a-f42c-509d-92db-d02eb10a5427" [ 1388.094880] env[68282]: _type = "Task" [ 1388.094880] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.105839] env[68282]: DEBUG oslo_vmware.api [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]525e974a-f42c-509d-92db-d02eb10a5427, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.606737] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.606737] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1388.606737] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.952695] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.952695] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.579125] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.465705] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6ef0eb15-f223-46ec-80ed-3e4d8df92310 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "3de5e306-7c39-4204-b957-f7c3a97e1c3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.466050] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6ef0eb15-f223-46ec-80ed-3e4d8df92310 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "3de5e306-7c39-4204-b957-f7c3a97e1c3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.426957] env[68282]: DEBUG oslo_concurrency.lockutils [None req-76b45784-11db-41cc-8c28-71f112e4ba6d tempest-ServersListShow296Test-1655116262 tempest-ServersListShow296Test-1655116262-project-member] Acquiring lock "6b2315d5-4134-4be5-b1ce-6b9941b33493" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.427228] env[68282]: DEBUG oslo_concurrency.lockutils [None req-76b45784-11db-41cc-8c28-71f112e4ba6d tempest-ServersListShow296Test-1655116262 tempest-ServersListShow296Test-1655116262-project-member] Lock "6b2315d5-4134-4be5-b1ce-6b9941b33493" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.573834] env[68282]: WARNING oslo_vmware.rw_handles [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1432.573834] env[68282]: ERROR oslo_vmware.rw_handles [ 1432.574379] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} 
[ 1432.576271] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1432.576511] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Copying Virtual Disk [datastore2] vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/895a80ce-7884-4e3c-b7fd-73716b2bf064/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1432.576802] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad5695be-d7e6-4ea6-9b90-7e6dbb59c2af {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.585167] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Waiting for the task: (returnval){ [ 1432.585167] env[68282]: value = "task-3470551" [ 1432.585167] env[68282]: _type = "Task" [ 1432.585167] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.593633] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Task: {'id': task-3470551, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.095895] env[68282]: DEBUG oslo_vmware.exceptions [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1433.096273] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.096857] env[68282]: ERROR nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1433.096857] env[68282]: Faults: ['InvalidArgument'] [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Traceback (most recent call last): [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] yield resources [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self.driver.spawn(context, instance, image_meta, [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._fetch_image_if_missing(context, vi) [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] image_cache(vi, tmp_image_ds_loc) [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] vm_util.copy_virtual_disk( [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] session._wait_for_task(vmdk_copy_task) [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return self.wait_for_task(task_ref) [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return evt.wait() [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] result = hub.switch() [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return self.greenlet.switch() [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self.f(*self.args, **self.kw) [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] raise exceptions.translate_fault(task_info.error) [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Faults: ['InvalidArgument'] [ 1433.096857] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] [ 1433.097735] env[68282]: INFO nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Terminating instance [ 1433.098813] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.099039] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1433.099294] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b0e444c-4a5b-441c-81fd-d4ac0f5ef626 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.101465] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.101626] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquired lock "refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.101803] env[68282]: DEBUG nova.network.neutron [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1433.109074] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1433.109251] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1433.110471] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71882aa6-b531-476c-af41-16661a613de0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.118210] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Waiting for the task: (returnval){ [ 1433.118210] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52cfc425-9cf6-cef0-6afa-24804bbfc003" [ 1433.118210] env[68282]: _type = "Task" [ 1433.118210] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.126008] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52cfc425-9cf6-cef0-6afa-24804bbfc003, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.136175] env[68282]: DEBUG nova.network.neutron [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1433.199553] env[68282]: DEBUG nova.network.neutron [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.208326] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Releasing lock "refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.208745] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1433.208953] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1433.210032] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf266e9-c5e6-4eaa-9b73-ce25764d6f33 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.219804] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1433.220039] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d4a1e7cf-072b-4b42-8353-78e42283fbcf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.272746] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1433.273112] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1433.273385] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Deleting the datastore file [datastore2] a7b5f30a-7ddf-4e8b-b57c-715e41819c29 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} 
[ 1433.273660] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-849c5d65-c2b2-4e52-ad93-9796cd0e9c13 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.280114] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Waiting for the task: (returnval){ [ 1433.280114] env[68282]: value = "task-3470553" [ 1433.280114] env[68282]: _type = "Task" [ 1433.280114] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.287900] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Task: {'id': task-3470553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.629583] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1433.629864] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Creating directory with path [datastore2] vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1433.630119] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5b2dcf8-0929-4c86-90c4-91f2d47ba65f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.642666] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Created directory with path [datastore2] vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1433.642885] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Fetch image to [datastore2] vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1433.643132] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1433.643871] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2342cb69-b2a2-4a95-b967-4e8f7d4b3aac {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.650951] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76c6000-4132-4bf8-b308-5ef496fb6850 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.660285] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f584c3d-9ca4-4ea0-91f3-9b9fc0e06f09 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.690783] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbc5e68-4da9-4bd5-ac2a-45f1d867a13a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.697119] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-71367f37-0773-4a4e-b8fb-30cf11091558 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.717920] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1433.769109] env[68282]: DEBUG oslo_vmware.rw_handles [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1433.832857] env[68282]: DEBUG oslo_vmware.rw_handles [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1433.833064] env[68282]: DEBUG oslo_vmware.rw_handles [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1433.837703] env[68282]: DEBUG oslo_vmware.api [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Task: {'id': task-3470553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043617} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.837986] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1433.838255] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1433.838497] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1433.838731] env[68282]: INFO nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1433.839069] env[68282]: DEBUG oslo.service.loopingcall [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1433.839326] env[68282]: DEBUG nova.compute.manager [-] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1433.841600] env[68282]: DEBUG nova.compute.claims [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1433.841818] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.842089] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.139254] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557d5f5e-67fd-43fc-87e9-39563f1e9ff5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.147298] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215c4fe6-1c5b-41ba-ac00-93c4f2678544 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.177217] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38206cb-76a4-4d21-bd1d-6ddcd96b1603 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.184594] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820f2499-7abe-45a1-bd5f-e3fd90c47227 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.198094] env[68282]: DEBUG nova.compute.provider_tree [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1434.206768] env[68282]: DEBUG nova.scheduler.client.report [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1434.221489] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.379s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.222061] env[68282]: ERROR nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1434.222061] env[68282]: Faults: ['InvalidArgument'] [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Traceback (most recent call last): [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self.driver.spawn(context, instance, image_meta, [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._fetch_image_if_missing(context, vi) [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] image_cache(vi, tmp_image_ds_loc) [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] vm_util.copy_virtual_disk( [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] session._wait_for_task(vmdk_copy_task) [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return self.wait_for_task(task_ref) [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return 
evt.wait() [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] result = hub.switch() [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return self.greenlet.switch() [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self.f(*self.args, **self.kw) [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] raise exceptions.translate_fault(task_info.error) [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Faults: ['InvalidArgument'] [ 1434.222061] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] [ 1434.222774] env[68282]: DEBUG nova.compute.utils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1434.224244] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Build of instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 was re-scheduled: A specified parameter was not correct: fileType [ 1434.224244] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1434.224621] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1434.224916] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.225013] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 
tempest-ServersAdmin275Test-1535174061-project-member] Acquired lock "refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.225199] env[68282]: DEBUG nova.network.neutron [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1434.254698] env[68282]: DEBUG nova.network.neutron [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1434.329221] env[68282]: DEBUG nova.network.neutron [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.338594] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Releasing lock "refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.338832] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1434.339029] env[68282]: DEBUG nova.compute.manager [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1434.425672] env[68282]: INFO nova.scheduler.client.report [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Deleted allocations for instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 [ 1434.442886] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a81e9399-d9e1-4b65-b975-ed6f5ee81e1c tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.271s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.444013] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.027s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.444247] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.444452] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.444617] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.446564] env[68282]: INFO nova.compute.manager [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Terminating instance [ 1434.448259] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquiring lock "refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.448416] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Acquired lock 
"refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.448645] env[68282]: DEBUG nova.network.neutron [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1434.457857] env[68282]: DEBUG nova.compute.manager [None req-f3e94041-9e33-4906-9cd1-e100cd993789 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: 7c17f658-a502-4e35-a4d4-5b8e37da47c5] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1434.476125] env[68282]: DEBUG nova.network.neutron [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1434.487676] env[68282]: DEBUG nova.compute.manager [None req-f3e94041-9e33-4906-9cd1-e100cd993789 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: 7c17f658-a502-4e35-a4d4-5b8e37da47c5] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1434.507334] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f3e94041-9e33-4906-9cd1-e100cd993789 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "7c17f658-a502-4e35-a4d4-5b8e37da47c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.649s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.515785] env[68282]: DEBUG nova.compute.manager [None req-963c7643-83d4-40c8-86fb-ddfa340b7b56 tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] [instance: a7576467-3848-400d-925f-0a1a070dbf07] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1434.537997] env[68282]: DEBUG nova.network.neutron [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.539596] env[68282]: DEBUG nova.compute.manager [None req-963c7643-83d4-40c8-86fb-ddfa340b7b56 tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] [instance: a7576467-3848-400d-925f-0a1a070dbf07] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1434.546060] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Releasing lock "refresh_cache-a7b5f30a-7ddf-4e8b-b57c-715e41819c29" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.546372] env[68282]: DEBUG nova.compute.manager [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1434.546564] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1434.547438] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-806823af-2f3e-49de-83db-e053836771d1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.558699] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613d0ffa-32d3-40f7-9e16-b97123e6a8f0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.569671] env[68282]: DEBUG oslo_concurrency.lockutils [None req-963c7643-83d4-40c8-86fb-ddfa340b7b56 tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] Lock "a7576467-3848-400d-925f-0a1a070dbf07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.941s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.589426] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a7b5f30a-7ddf-4e8b-b57c-715e41819c29 could not be found. [ 1434.589637] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1434.589816] env[68282]: INFO nova.compute.manager [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1434.590069] env[68282]: DEBUG oslo.service.loopingcall [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1434.590809] env[68282]: DEBUG nova.compute.manager [-] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1434.590899] env[68282]: DEBUG nova.network.neutron [-] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1434.592613] env[68282]: DEBUG nova.compute.manager [None req-34e12bd0-c93e-4a04-a568-d35c94c7388b tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] [instance: 81d2e3d5-2b11-4c9a-93eb-16e6929aada0] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1434.618299] env[68282]: DEBUG nova.compute.manager [None req-34e12bd0-c93e-4a04-a568-d35c94c7388b tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] [instance: 81d2e3d5-2b11-4c9a-93eb-16e6929aada0] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1434.640866] env[68282]: DEBUG oslo_concurrency.lockutils [None req-34e12bd0-c93e-4a04-a568-d35c94c7388b tempest-ServerRescueNegativeTestJSON-1884232739 tempest-ServerRescueNegativeTestJSON-1884232739-project-member] Lock "81d2e3d5-2b11-4c9a-93eb-16e6929aada0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.580s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.651598] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1434.701241] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.701499] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.703044] env[68282]: INFO nova.compute.claims [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1434.748895] env[68282]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68282) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1434.749219] env[68282]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1434.749816] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-4b929a1f-8ac2-4148-9066-7e4065141d64'] [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1434.749816] env[68282]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1434.749816] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1434.750835] env[68282]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1434.750835] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1434.750835] env[68282]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1434.750835] env[68282]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1434.750835] env[68282]: ERROR oslo.service.loopingcall [ 1434.750835] env[68282]: ERROR nova.compute.manager [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1434.783918] env[68282]: ERROR nova.compute.manager [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Traceback (most recent call last): [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] ret = obj(*args, **kwargs) [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] exception_handler_v20(status_code, error_body) [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] raise client_exc(message=error_message, [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Neutron server returns request_ids: ['req-4b929a1f-8ac2-4148-9066-7e4065141d64'] [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] During handling of the above exception, another exception occurred: [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Traceback (most recent call last): [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._delete_instance(context, instance, bdms) [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._shutdown_instance(context, instance, bdms) [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._try_deallocate_network(context, instance, requested_networks) [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] with excutils.save_and_reraise_exception(): [ 1434.783918] env[68282]: ERROR 
nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self.force_reraise() [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] raise self.value [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] _deallocate_network_with_retries() [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return evt.wait() [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1434.783918] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] result = hub.switch() [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return self.greenlet.switch() [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] result = func(*self.args, **self.kw) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] result = f(*args, **kwargs) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._deallocate_network( [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self.network_api.deallocate_for_instance( [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: 
a7b5f30a-7ddf-4e8b-b57c-715e41819c29] data = neutron.list_ports(**search_opts) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] ret = obj(*args, **kwargs) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return self.list('ports', self.ports_path, retrieve_all, [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] ret = obj(*args, **kwargs) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] for r in self._pagination(collection, path, **params): [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] res = self.get(path, params=params) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] ret = obj(*args, **kwargs) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return self.retry_request("GET", action, body=body, [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] ret = obj(*args, **kwargs) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] return self.do_request(method, action, body=body, [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] ret = obj(*args, **kwargs) [ 1434.784935] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1434.785774] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] self._handle_fault_response(status_code, replybody, resp) [ 1434.785774] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1434.785774] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1434.785774] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1434.785774] env[68282]: ERROR nova.compute.manager [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] [ 1434.811313] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.367s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.812396] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 151.214s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.812583] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] During sync_power_state the instance has a pending task (deleting). Skip. [ 1434.812755] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "a7b5f30a-7ddf-4e8b-b57c-715e41819c29" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.861039] env[68282]: INFO nova.compute.manager [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] [instance: a7b5f30a-7ddf-4e8b-b57c-715e41819c29] Successfully reverted task state from None on failure for instance. [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server [None req-d817e56e-72f0-4109-9710-a0dfc64577e3 tempest-ServersAdmin275Test-1535174061 tempest-ServersAdmin275Test-1535174061-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-4b929a1f-8ac2-4148-9066-7e4065141d64'] [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1434.867076] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.868229] env[68282]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1434.868229] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1434.869310] env[68282]: ERROR oslo_messaging.rpc.server [ 1435.007011] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bb540e-91ba-4db0-bdda-d037bb7f18c1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.016375] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4702c16-ea6a-4e7e-a471-e837df5ad57a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.045495] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c5d7b5-68ec-45fc-b32f-cfe9ebb72a43 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.052897] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6cad651-efff-4944-8ee5-1a3b89a99f3c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.065620] env[68282]: DEBUG nova.compute.provider_tree [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.074026] env[68282]: DEBUG nova.scheduler.client.report [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1435.086981] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.088122] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.387s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.088592] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1435.095419] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.095621] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.095782] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.095952] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1435.096924] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6cf35c-4ddf-4a62-b82b-09e365c15a35 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.105164] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc06a0a-7945-4a7b-815b-dfb69c0cb8f1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.119076] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09112e30-f0a2-4ea3-b032-38b76be2f9b6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.122988] env[68282]: DEBUG nova.compute.utils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1435.125474] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Allocating IP information in the background. 
{{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1435.125685] env[68282]: DEBUG nova.network.neutron [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1435.130780] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdeabef4-eda2-43d3-a57f-ffc32d399405 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.165143] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180896MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1435.165352] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.165537] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.167477] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1435.213348] env[68282]: DEBUG nova.policy [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11d7117fb5b444b5b5a417b17b91c6a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd776412fa37145d2b8f9477fbe18087e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1435.237658] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1435.241500] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 66243637-f1f4-4c60-b12a-bbe30c423630 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.241645] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.241744] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.241884] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.242009] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.242140] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.242259] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.242376] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.242491] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.242605] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1435.253558] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.264235] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.267082] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1435.267305] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1435.267459] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1435.267637] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1435.267780] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1435.267922] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1435.269091] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1435.269091] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1435.269091] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1435.269091] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1435.269091] env[68282]: DEBUG nova.virt.hardware [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1435.270783] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af9df4b-1648-40f5-8de0-7274a8a52aff {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.275332] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 56b8d673-b69b-4f07-9c35-12c9390ed505 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.279368] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0420dcf-0aed-4e31-bf8d-d2023ead1180 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.285873] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2f617973-033c-42e5-8451-2a565291d7c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.296239] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2d3581e8-19f6-4665-9b3c-a89dfdefb166 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.306600] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 567cf5f0-3e42-4f75-8b8d-978220c161d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.315896] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b112af96-eff4-4b26-9161-deb9ab41afb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.325743] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 18588d7f-1748-4e42-b91a-83edda89e6ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.336183] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e0393ce7-f7d9-470d-8941-4a0ef876202d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.345893] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.357358] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3de5e306-7c39-4204-b957-f7c3a97e1c3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.367140] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6b2315d5-4134-4be5-b1ce-6b9941b33493 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1435.367405] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1435.367561] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1435.613181] env[68282]: DEBUG nova.network.neutron [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Successfully created port: 0753f5ca-57ad-416c-b60e-b04678873f1b {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1435.643946] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10154224-a9e4-4d51-b85b-ad443616fef8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.649783] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d484413d-ac30-4c60-af87-e94f00866d9c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.680299] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f575456-98df-465a-81c5-a7ad8b393065 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.687639] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc13ce8-060e-4da8-904a-d91a6796d932 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.702840] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1435.713465] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1435.734405] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1435.734606] 
env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.569s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.368293] env[68282]: DEBUG nova.compute.manager [req-cbbffbb5-91fa-4612-a65a-4fb203e9797d req-dc38d212-3842-4dcc-b9f0-f14a24b9f775 service nova] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Received event network-vif-plugged-0753f5ca-57ad-416c-b60e-b04678873f1b {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1436.368511] env[68282]: DEBUG oslo_concurrency.lockutils [req-cbbffbb5-91fa-4612-a65a-4fb203e9797d req-dc38d212-3842-4dcc-b9f0-f14a24b9f775 service nova] Acquiring lock "16824286-3e71-4f49-8a6e-93f10ec668d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.368731] env[68282]: DEBUG oslo_concurrency.lockutils [req-cbbffbb5-91fa-4612-a65a-4fb203e9797d req-dc38d212-3842-4dcc-b9f0-f14a24b9f775 service nova] Lock "16824286-3e71-4f49-8a6e-93f10ec668d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.368903] env[68282]: DEBUG oslo_concurrency.lockutils [req-cbbffbb5-91fa-4612-a65a-4fb203e9797d req-dc38d212-3842-4dcc-b9f0-f14a24b9f775 service nova] Lock "16824286-3e71-4f49-8a6e-93f10ec668d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.369219] env[68282]: DEBUG nova.compute.manager [req-cbbffbb5-91fa-4612-a65a-4fb203e9797d req-dc38d212-3842-4dcc-b9f0-f14a24b9f775 service nova] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] No waiting events found dispatching network-vif-plugged-0753f5ca-57ad-416c-b60e-b04678873f1b {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1436.369420] env[68282]: WARNING nova.compute.manager [req-cbbffbb5-91fa-4612-a65a-4fb203e9797d req-dc38d212-3842-4dcc-b9f0-f14a24b9f775 service nova] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Received unexpected event network-vif-plugged-0753f5ca-57ad-416c-b60e-b04678873f1b for instance with vm_state building and task_state spawning. 
[ 1436.475562] env[68282]: DEBUG nova.network.neutron [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Successfully updated port: 0753f5ca-57ad-416c-b60e-b04678873f1b {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1436.490670] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquiring lock "refresh_cache-16824286-3e71-4f49-8a6e-93f10ec668d6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.490832] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquired lock "refresh_cache-16824286-3e71-4f49-8a6e-93f10ec668d6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.490985] env[68282]: DEBUG nova.network.neutron [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1436.564948] env[68282]: DEBUG nova.network.neutron [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1436.994223] env[68282]: DEBUG nova.network.neutron [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Updating instance_info_cache with network_info: [{"id": "0753f5ca-57ad-416c-b60e-b04678873f1b", "address": "fa:16:3e:4d:be:8a", "network": {"id": "2675bb7e-9282-419b-b193-7dc17cdf62f6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-512931750-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d776412fa37145d2b8f9477fbe18087e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0753f5ca-57", "ovs_interfaceid": "0753f5ca-57ad-416c-b60e-b04678873f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.012873] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Releasing lock "refresh_cache-16824286-3e71-4f49-8a6e-93f10ec668d6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.013226] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Instance network_info: |[{"id": "0753f5ca-57ad-416c-b60e-b04678873f1b", "address": "fa:16:3e:4d:be:8a", "network": {"id": "2675bb7e-9282-419b-b193-7dc17cdf62f6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-512931750-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d776412fa37145d2b8f9477fbe18087e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0753f5ca-57", "ovs_interfaceid": "0753f5ca-57ad-416c-b60e-b04678873f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1437.013641] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:be:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0753f5ca-57ad-416c-b60e-b04678873f1b', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1437.021782] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Creating folder: Project (d776412fa37145d2b8f9477fbe18087e). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1437.022443] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac1b191c-5800-435b-89df-f3042246368f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.036017] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Created folder: Project (d776412fa37145d2b8f9477fbe18087e) in parent group-v693573. [ 1437.036305] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Creating folder: Instances. Parent ref: group-v693650. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1437.036552] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc7aa67e-2bd3-4b01-bae5-fa6f4d375656 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.045480] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Created folder: Instances in parent group-v693650. [ 1437.045743] env[68282]: DEBUG oslo.service.loopingcall [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1437.045933] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1437.046175] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50f841ab-45f2-40df-b04b-cee477139d2d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.067808] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1437.067808] env[68282]: value = "task-3470556" [ 1437.067808] env[68282]: _type = "Task" [ 1437.067808] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.076673] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470556, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.577187] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470556, 'name': CreateVM_Task, 'duration_secs': 0.295194} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.577345] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1437.577994] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.578177] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.578488] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1437.578730] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0ce94a2-1242-4ccf-aa4a-2317f84011b5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.583071] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Waiting for the task: (returnval){ [ 1437.583071] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ced188-4773-3c89-6344-deeae171f8a8" [ 1437.583071] env[68282]: _type = "Task" [ 1437.583071] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.590345] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ced188-4773-3c89-6344-deeae171f8a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.094016] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.094308] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1438.094529] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.394844] env[68282]: DEBUG nova.compute.manager [req-8a43ad8a-a84d-45dc-af5b-2ce3907bb852 req-8cc005da-702d-41ff-8d53-e2e8f3061e8d service nova] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Received event network-changed-0753f5ca-57ad-416c-b60e-b04678873f1b {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1438.395055] env[68282]: DEBUG nova.compute.manager [req-8a43ad8a-a84d-45dc-af5b-2ce3907bb852 req-8cc005da-702d-41ff-8d53-e2e8f3061e8d service nova] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Refreshing instance network info cache due to event network-changed-0753f5ca-57ad-416c-b60e-b04678873f1b. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1438.395315] env[68282]: DEBUG oslo_concurrency.lockutils [req-8a43ad8a-a84d-45dc-af5b-2ce3907bb852 req-8cc005da-702d-41ff-8d53-e2e8f3061e8d service nova] Acquiring lock "refresh_cache-16824286-3e71-4f49-8a6e-93f10ec668d6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.395528] env[68282]: DEBUG oslo_concurrency.lockutils [req-8a43ad8a-a84d-45dc-af5b-2ce3907bb852 req-8cc005da-702d-41ff-8d53-e2e8f3061e8d service nova] Acquired lock "refresh_cache-16824286-3e71-4f49-8a6e-93f10ec668d6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.395707] env[68282]: DEBUG nova.network.neutron [req-8a43ad8a-a84d-45dc-af5b-2ce3907bb852 req-8cc005da-702d-41ff-8d53-e2e8f3061e8d service nova] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Refreshing network info cache for port 0753f5ca-57ad-416c-b60e-b04678873f1b {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1438.736730] env[68282]: DEBUG nova.network.neutron [req-8a43ad8a-a84d-45dc-af5b-2ce3907bb852 req-8cc005da-702d-41ff-8d53-e2e8f3061e8d service nova] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Updated VIF entry in instance network info cache for port 0753f5ca-57ad-416c-b60e-b04678873f1b. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1438.737110] env[68282]: DEBUG nova.network.neutron [req-8a43ad8a-a84d-45dc-af5b-2ce3907bb852 req-8cc005da-702d-41ff-8d53-e2e8f3061e8d service nova] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Updating instance_info_cache with network_info: [{"id": "0753f5ca-57ad-416c-b60e-b04678873f1b", "address": "fa:16:3e:4d:be:8a", "network": {"id": "2675bb7e-9282-419b-b193-7dc17cdf62f6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-512931750-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d776412fa37145d2b8f9477fbe18087e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e5d88cd9-35a3-4ac3-9d6d-756464cd6cc5", "external-id": "nsx-vlan-transportzone-685", "segmentation_id": 685, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0753f5ca-57", "ovs_interfaceid": "0753f5ca-57ad-416c-b60e-b04678873f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.747369] env[68282]: DEBUG oslo_concurrency.lockutils [req-8a43ad8a-a84d-45dc-af5b-2ce3907bb852 req-8cc005da-702d-41ff-8d53-e2e8f3061e8d service nova] Releasing lock "refresh_cache-16824286-3e71-4f49-8a6e-93f10ec668d6" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.735552] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.735840] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1442.735878] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1442.758908] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.759089] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.759229] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.759361] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.759489] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.759615] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.759733] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.759852] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.759972] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.760106] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1442.760254] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1443.086652] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.086892] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.087058] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1444.083080] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.086750] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.087389] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.087715] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1446.087313] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.334290] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquiring lock "16824286-3e71-4f49-8a6e-93f10ec668d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.963923] env[68282]: WARNING oslo_vmware.rw_handles [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 
tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1480.963923] env[68282]: ERROR oslo_vmware.rw_handles [ 1480.964581] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1480.966507] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1480.966789] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Copying Virtual Disk [datastore2] vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/9c7e8ed1-53b5-427c-8e0f-70f954399266/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1480.967091] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fadd248-2c8c-47cc-9a49-ad4f3292303a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.975462] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Waiting for the task: (returnval){ [ 1480.975462] env[68282]: value = "task-3470557" [ 1480.975462] env[68282]: _type = "Task" [ 1480.975462] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.983492] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Task: {'id': task-3470557, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.485862] env[68282]: DEBUG oslo_vmware.exceptions [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1481.486164] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.486750] env[68282]: ERROR nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1481.486750] env[68282]: Faults: ['InvalidArgument'] [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Traceback (most recent call last): [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] yield resources [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] self.driver.spawn(context, instance, image_meta, [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] self._fetch_image_if_missing(context, vi) [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] image_cache(vi, tmp_image_ds_loc) [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 
66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] vm_util.copy_virtual_disk( [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] session._wait_for_task(vmdk_copy_task) [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] return self.wait_for_task(task_ref) [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] return evt.wait() [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] result = hub.switch() [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] return self.greenlet.switch() [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] self.f(*self.args, **self.kw) [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] raise exceptions.translate_fault(task_info.error) [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Faults: ['InvalidArgument'] [ 1481.486750] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] [ 1481.487685] env[68282]: INFO nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Terminating instance [ 1481.488651] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.488855] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1481.489108] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee78e63b-af96-46d5-94fc-87c2dda7a240 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.493571] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1481.496011] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1481.496011] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc29288e-b641-49fe-a42b-0f6318c09db4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.497997] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1481.498194] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1481.499209] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67534034-293e-4223-9c63-090876a195c4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.503480] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1481.503969] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45322882-a07a-4540-9e86-fded00902201 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.506503] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 1481.506503] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52606173-c417-4acc-a039-40fa54499978" [ 1481.506503] env[68282]: _type = "Task" [ 1481.506503] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.518287] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52606173-c417-4acc-a039-40fa54499978, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.571657] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1481.571876] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1481.572069] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Deleting the datastore file [datastore2] 66243637-f1f4-4c60-b12a-bbe30c423630 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1481.572334] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69e832e2-b06a-4c14-94f9-a2b9344e7c9b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.578163] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Waiting for the task: (returnval){ [ 1481.578163] env[68282]: value = "task-3470559" [ 1481.578163] env[68282]: _type = "Task" [ 1481.578163] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.585663] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Task: {'id': task-3470559, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.016748] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1482.017052] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating directory with path [datastore2] vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1482.017278] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-011ca030-8970-4fe0-81ef-3e8247f6be16 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.038554] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created directory with path [datastore2] vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1482.038754] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Fetch image to [datastore2] vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1482.038929] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1482.039681] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7faebabc-6832-4b4c-9b1d-eaa65aa8704a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.046150] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afffe2c-a9d7-4f29-ae78-2ff5d0bc0a6c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.055052] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b07863-dfaf-4b1e-9e2f-983ca28a0fae {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.088544] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-75910976-b61a-4638-a3cb-3de5fc13f863 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.095161] env[68282]: DEBUG oslo_vmware.api [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Task: {'id': task-3470559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078024} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.096618] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1482.096811] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1482.096985] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1482.097173] env[68282]: INFO nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1482.098906] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-28581bdb-a2b0-436e-b60e-f72cafcf1dc7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.100728] env[68282]: DEBUG nova.compute.claims [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1482.100905] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.101138] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.123616] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1482.180265] env[68282]: DEBUG oslo_vmware.rw_handles [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1482.242419] env[68282]: DEBUG oslo_vmware.rw_handles [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1482.242617] env[68282]: DEBUG oslo_vmware.rw_handles [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1482.430180] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39adfef-a399-49d8-853e-da2079c9cdcc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.438427] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a9bb7c-b563-4889-be40-2375be82e38e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.467178] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa0f9fa4-6342-43fe-a66e-e4ded5737a16 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.473773] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3583dedf-494f-4592-bb6b-c2b32071fb9e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.486269] env[68282]: DEBUG nova.compute.provider_tree [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.495800] env[68282]: DEBUG nova.scheduler.client.report [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1482.509498] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.408s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.510021] env[68282]: ERROR nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1482.510021] env[68282]: Faults: ['InvalidArgument'] [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Traceback (most recent call last): [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1482.510021] 
env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] self.driver.spawn(context, instance, image_meta, [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] self._fetch_image_if_missing(context, vi) [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] image_cache(vi, tmp_image_ds_loc) [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] vm_util.copy_virtual_disk( [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] session._wait_for_task(vmdk_copy_task) [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] return self.wait_for_task(task_ref) [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] return evt.wait() [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] result = hub.switch() [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] return self.greenlet.switch() [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] self.f(*self.args, **self.kw) [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] raise exceptions.translate_fault(task_info.error) [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Faults: ['InvalidArgument'] [ 1482.510021] env[68282]: ERROR nova.compute.manager [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] [ 1482.510912] env[68282]: DEBUG nova.compute.utils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1482.512078] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Build of instance 66243637-f1f4-4c60-b12a-bbe30c423630 was re-scheduled: A specified parameter was not correct: fileType [ 1482.512078] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1482.512457] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1482.512636] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1482.512820] env[68282]: DEBUG nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1482.512987] env[68282]: DEBUG nova.network.neutron [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1483.177746] env[68282]: DEBUG nova.network.neutron [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.201148] env[68282]: INFO nova.compute.manager [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Took 0.69 seconds to deallocate network for instance. [ 1483.305551] env[68282]: INFO nova.scheduler.client.report [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Deleted allocations for instance 66243637-f1f4-4c60-b12a-bbe30c423630 [ 1483.328029] env[68282]: DEBUG oslo_concurrency.lockutils [None req-7238f89b-06c8-4904-a16e-eb4ca9176611 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "66243637-f1f4-4c60-b12a-bbe30c423630" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.044s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.329118] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "66243637-f1f4-4c60-b12a-bbe30c423630" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.408s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.329351] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Acquiring lock "66243637-f1f4-4c60-b12a-bbe30c423630-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.329560] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "66243637-f1f4-4c60-b12a-bbe30c423630-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.329725] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "66243637-f1f4-4c60-b12a-bbe30c423630-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.331894] env[68282]: INFO nova.compute.manager [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Terminating instance [ 1483.333621] env[68282]: DEBUG nova.compute.manager [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1483.333820] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1483.334569] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33325752-839f-40f7-b9ed-4bbef0b7cacf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.343293] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7670e18-9710-4538-b69d-68748af72831 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.354562] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1483.375156] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66243637-f1f4-4c60-b12a-bbe30c423630 could not be found. 
[ 1483.375404] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1483.375545] env[68282]: INFO nova.compute.manager [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1483.375816] env[68282]: DEBUG oslo.service.loopingcall [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1483.376054] env[68282]: DEBUG nova.compute.manager [-] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1483.376152] env[68282]: DEBUG nova.network.neutron [-] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1483.400073] env[68282]: DEBUG nova.network.neutron [-] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.403092] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.403092] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.404488] env[68282]: INFO nova.compute.claims [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.408092] env[68282]: INFO nova.compute.manager [-] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] Took 0.03 seconds to deallocate network for instance. 
[ 1483.509245] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6aeab016-1269-4411-8f38-4c7c0185a112 tempest-AttachInterfacesV270Test-1796980287 tempest-AttachInterfacesV270Test-1796980287-project-member] Lock "66243637-f1f4-4c60-b12a-bbe30c423630" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.180s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.510448] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "66243637-f1f4-4c60-b12a-bbe30c423630" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 199.911s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.510448] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 66243637-f1f4-4c60-b12a-bbe30c423630] During sync_power_state the instance has a pending task (deleting). Skip. [ 1483.510584] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "66243637-f1f4-4c60-b12a-bbe30c423630" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.677777] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cca7c4-bb4d-4e60-81a2-bac5d1d65f44 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.685145] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34da3f84-64a2-4da6-a1ba-c2bf72599fea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.715111] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dda0210-c646-48d4-a7e7-d7ddd12e82b8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.721707] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8ef64a-fc40-40d5-88cc-f48c860bb342 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.734221] env[68282]: DEBUG nova.compute.provider_tree [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1483.742374] env[68282]: DEBUG nova.scheduler.client.report [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1483.756571] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.353s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.757018] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1483.787573] env[68282]: DEBUG nova.compute.utils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1483.788907] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1483.789094] env[68282]: DEBUG nova.network.neutron [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1483.798653] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1483.851138] env[68282]: DEBUG nova.policy [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7aee8a1e035742e0b67873bfcce2ef72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3d2a3fac4b04f8fa6622043de5e500d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1483.858859] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1483.883762] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1483.884024] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1483.884190] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1483.884374] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1483.884582] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1483.884791] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1483.885044] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1483.885225] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1483.885454] 
env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1483.885619] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1483.885791] env[68282]: DEBUG nova.virt.hardware [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1483.886730] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25b8db1-3d1e-4ab4-9efb-17dc79ae1152 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.894467] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbc964a-a911-49e5-8633-bbfcb6ce0c64 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.366110] env[68282]: DEBUG nova.network.neutron [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Successfully created port: 08ecd02e-27c3-47a8-a07e-61d7dd79c6a2 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1485.462404] env[68282]: DEBUG nova.network.neutron [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Successfully updated port: 08ecd02e-27c3-47a8-a07e-61d7dd79c6a2 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1485.477898] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "refresh_cache-9874370f-917a-412b-91ce-a92e73d6ac0d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.477898] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "refresh_cache-9874370f-917a-412b-91ce-a92e73d6ac0d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.478029] env[68282]: DEBUG nova.network.neutron [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1485.539981] env[68282]: DEBUG 
nova.compute.manager [req-298e6fc5-da97-492b-8bf5-001cb4372007 req-b91646f3-900d-4be4-a133-44fe082e2ba1 service nova] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Received event network-vif-plugged-08ecd02e-27c3-47a8-a07e-61d7dd79c6a2 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1485.540232] env[68282]: DEBUG oslo_concurrency.lockutils [req-298e6fc5-da97-492b-8bf5-001cb4372007 req-b91646f3-900d-4be4-a133-44fe082e2ba1 service nova] Acquiring lock "9874370f-917a-412b-91ce-a92e73d6ac0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.540442] env[68282]: DEBUG oslo_concurrency.lockutils [req-298e6fc5-da97-492b-8bf5-001cb4372007 req-b91646f3-900d-4be4-a133-44fe082e2ba1 service nova] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.540610] env[68282]: DEBUG oslo_concurrency.lockutils [req-298e6fc5-da97-492b-8bf5-001cb4372007 req-b91646f3-900d-4be4-a133-44fe082e2ba1 service nova] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.540783] env[68282]: DEBUG nova.compute.manager [req-298e6fc5-da97-492b-8bf5-001cb4372007 req-b91646f3-900d-4be4-a133-44fe082e2ba1 service nova] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] No waiting events found dispatching network-vif-plugged-08ecd02e-27c3-47a8-a07e-61d7dd79c6a2 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1485.540949] env[68282]: WARNING nova.compute.manager [req-298e6fc5-da97-492b-8bf5-001cb4372007 req-b91646f3-900d-4be4-a133-44fe082e2ba1 service nova] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Received unexpected event network-vif-plugged-08ecd02e-27c3-47a8-a07e-61d7dd79c6a2 for instance with vm_state building and task_state spawning. [ 1485.549920] env[68282]: DEBUG nova.network.neutron [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1485.733543] env[68282]: DEBUG nova.network.neutron [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Updating instance_info_cache with network_info: [{"id": "08ecd02e-27c3-47a8-a07e-61d7dd79c6a2", "address": "fa:16:3e:8d:98:36", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ecd02e-27", "ovs_interfaceid": "08ecd02e-27c3-47a8-a07e-61d7dd79c6a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.748294] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "refresh_cache-9874370f-917a-412b-91ce-a92e73d6ac0d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.748594] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Instance network_info: |[{"id": "08ecd02e-27c3-47a8-a07e-61d7dd79c6a2", "address": "fa:16:3e:8d:98:36", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ecd02e-27", "ovs_interfaceid": "08ecd02e-27c3-47a8-a07e-61d7dd79c6a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1485.749056] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:98:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b911797-478d-4ee5-bce9-6f2f49014e94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08ecd02e-27c3-47a8-a07e-61d7dd79c6a2', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1485.757116] env[68282]: DEBUG oslo.service.loopingcall [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1485.757581] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1485.757817] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b97975f-6828-4dfa-aa9d-e4712c337d64 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.779068] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1485.779068] env[68282]: value = "task-3470560" [ 1485.779068] env[68282]: _type = "Task" [ 1485.779068] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.786284] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470560, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.290402] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470560, 'name': CreateVM_Task} progress is 25%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.790210] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470560, 'name': CreateVM_Task} progress is 25%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.291359] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470560, 'name': CreateVM_Task} progress is 99%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.670832] env[68282]: DEBUG nova.compute.manager [req-fe0a6021-de20-4958-b700-c6ca3e45a455 req-9e3fc2a6-1c3f-4524-9885-664e5b0201fe service nova] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Received event network-changed-08ecd02e-27c3-47a8-a07e-61d7dd79c6a2 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1487.671061] env[68282]: DEBUG nova.compute.manager [req-fe0a6021-de20-4958-b700-c6ca3e45a455 req-9e3fc2a6-1c3f-4524-9885-664e5b0201fe service nova] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Refreshing instance network info cache due to event network-changed-08ecd02e-27c3-47a8-a07e-61d7dd79c6a2. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1487.671278] env[68282]: DEBUG oslo_concurrency.lockutils [req-fe0a6021-de20-4958-b700-c6ca3e45a455 req-9e3fc2a6-1c3f-4524-9885-664e5b0201fe service nova] Acquiring lock "refresh_cache-9874370f-917a-412b-91ce-a92e73d6ac0d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.671465] env[68282]: DEBUG oslo_concurrency.lockutils [req-fe0a6021-de20-4958-b700-c6ca3e45a455 req-9e3fc2a6-1c3f-4524-9885-664e5b0201fe service nova] Acquired lock "refresh_cache-9874370f-917a-412b-91ce-a92e73d6ac0d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.671577] env[68282]: DEBUG nova.network.neutron [req-fe0a6021-de20-4958-b700-c6ca3e45a455 req-9e3fc2a6-1c3f-4524-9885-664e5b0201fe service nova] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Refreshing network info cache for port 08ecd02e-27c3-47a8-a07e-61d7dd79c6a2 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1487.793092] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470560, 'name': CreateVM_Task} progress is 99%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.292585] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470560, 'name': CreateVM_Task, 'duration_secs': 2.130542} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.293413] env[68282]: DEBUG nova.network.neutron [req-fe0a6021-de20-4958-b700-c6ca3e45a455 req-9e3fc2a6-1c3f-4524-9885-664e5b0201fe service nova] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Updated VIF entry in instance network info cache for port 08ecd02e-27c3-47a8-a07e-61d7dd79c6a2. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1488.293733] env[68282]: DEBUG nova.network.neutron [req-fe0a6021-de20-4958-b700-c6ca3e45a455 req-9e3fc2a6-1c3f-4524-9885-664e5b0201fe service nova] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Updating instance_info_cache with network_info: [{"id": "08ecd02e-27c3-47a8-a07e-61d7dd79c6a2", "address": "fa:16:3e:8d:98:36", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08ecd02e-27", "ovs_interfaceid": "08ecd02e-27c3-47a8-a07e-61d7dd79c6a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.294838] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1488.295524] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.295732] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.296059] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1488.296525] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ce644a1-5860-4996-abb3-be061532ac88 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.300993] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting 
for the task: (returnval){ [ 1488.300993] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ba6ba1-297c-076a-630c-b8dd53a04929" [ 1488.300993] env[68282]: _type = "Task" [ 1488.300993] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.304640] env[68282]: DEBUG oslo_concurrency.lockutils [req-fe0a6021-de20-4958-b700-c6ca3e45a455 req-9e3fc2a6-1c3f-4524-9885-664e5b0201fe service nova] Releasing lock "refresh_cache-9874370f-917a-412b-91ce-a92e73d6ac0d" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.310303] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ba6ba1-297c-076a-630c-b8dd53a04929, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.813642] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.813904] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1488.813971] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.087669] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1496.101124] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.102204] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.102204] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.102204] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1496.104729] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039abbe2-078d-40eb-93c9-9ed52c9ebc2a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.113022] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7117a6-43d0-4c64-9adb-080defb3af14 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.127063] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3aeedfb-65fe-40b6-9276-e2bb68feee32 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.133250] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b0a7c3-e73c-40e5-8812-0abe36711bca {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.161873] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180910MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1496.162034] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.163400] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.237186] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance eeb7149b-8d07-4968-9089-d6278c4565e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.237352] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.237482] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.237651] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.237785] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.237908] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.238038] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.238158] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.238273] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.238387] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.249097] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.260133] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 56b8d673-b69b-4f07-9c35-12c9390ed505 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.269239] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2f617973-033c-42e5-8451-2a565291d7c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.278627] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2d3581e8-19f6-4665-9b3c-a89dfdefb166 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.288133] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 567cf5f0-3e42-4f75-8b8d-978220c161d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.296905] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b112af96-eff4-4b26-9161-deb9ab41afb8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.306199] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 18588d7f-1748-4e42-b91a-83edda89e6ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.316681] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e0393ce7-f7d9-470d-8941-4a0ef876202d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.325716] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.334862] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3de5e306-7c39-4204-b957-f7c3a97e1c3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.345814] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6b2315d5-4134-4be5-b1ce-6b9941b33493 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1496.345814] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1496.345814] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1496.564307] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35753a8c-9398-4769-a6ce-ae81c3655876 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.571799] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713d40fe-9fdc-4050-9a62-87bad0daa45c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.600512] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539efd4f-3ded-4207-a862-b0c3df3217a5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.606854] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046aedf9-e884-4b53-b345-588639deb1c7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.619673] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.627848] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1496.642235] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1496.642416] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.480s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.642632] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.642911] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1503.643042] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1503.664769] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.664952] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.665130] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.665297] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.665490] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.665664] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.665816] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.665925] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.666075] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.666261] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1503.666352] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1503.666930] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.667101] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1504.087910] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.082402] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.087095] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.088799] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.088799] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.088799] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1507.084365] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.443894] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "7bc5117e-58d1-4c08-b778-7045b1076b94" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.444206] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.641458] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "9874370f-917a-412b-91ce-a92e73d6ac0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.952479] env[68282]: WARNING oslo_vmware.rw_handles [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1530.952479] env[68282]: ERROR oslo_vmware.rw_handles [ 1530.953056] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1530.954827] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1530.955111] 
env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Copying Virtual Disk [datastore2] vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/11104543-4215-400f-a409-48bc590a4801/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1530.955473] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9dd86c50-0fc8-46f2-abb3-afb7c728bbbc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.966166] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 1530.966166] env[68282]: value = "task-3470561" [ 1530.966166] env[68282]: _type = "Task" [ 1530.966166] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.974785] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470561, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.476788] env[68282]: DEBUG oslo_vmware.exceptions [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1531.477102] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.477654] env[68282]: ERROR nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1531.477654] env[68282]: Faults: ['InvalidArgument'] [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Traceback (most recent call last): [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] yield resources [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] self.driver.spawn(context, instance, image_meta, [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] self._fetch_image_if_missing(context, vi) [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] image_cache(vi, tmp_image_ds_loc) [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] vm_util.copy_virtual_disk( [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] session._wait_for_task(vmdk_copy_task) [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] return self.wait_for_task(task_ref) [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] return evt.wait() [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] result = hub.switch() [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] return self.greenlet.switch() [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] self.f(*self.args, **self.kw) [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] raise exceptions.translate_fault(task_info.error) [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Faults: ['InvalidArgument'] [ 1531.477654] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] [ 1531.478623] env[68282]: INFO nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Terminating instance [ 1531.479547] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.479769] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1531.480424] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd 
tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1531.480619] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1531.480847] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cae29a1-4727-4214-a844-124130669926 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.483333] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1531b5-b18b-43c1-b828-3bf713fe8827 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.490125] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1531.490362] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-873da595-297f-4d55-b38f-1e19f2c1ff5b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.492433] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1531.492610] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1531.493521] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ab8d72c-fbe3-42cf-a611-5ee9e025b955 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.499401] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1531.499401] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ff7712-8ac9-591b-c685-75e294587954" [ 1531.499401] env[68282]: _type = "Task" [ 1531.499401] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.507259] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ff7712-8ac9-591b-c685-75e294587954, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.558069] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1531.558288] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1531.558467] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleting the datastore file [datastore2] eeb7149b-8d07-4968-9089-d6278c4565e5 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.558726] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43dfe08b-85e9-413d-9540-83f85887280d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.564378] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 1531.564378] env[68282]: value = "task-3470563" [ 1531.564378] env[68282]: _type = "Task" [ 1531.564378] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.571610] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470563, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.012100] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1532.012100] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating directory with path [datastore2] vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1532.012100] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdc6ff11-ef4c-4c2b-b882-88e00627bc40 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.023027] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Created directory with path [datastore2] vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1532.023027] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Fetch image to [datastore2] vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1532.023027] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1532.023027] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0fb349-c9a4-4acb-9a2e-6b6485115039 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.030401] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9f340a-3ceb-4bd2-8c84-5114f2381629 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.039497] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c79262c-f270-4c6d-a85d-92c973c55003 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.072526] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4efb0141-cc9d-4096-89db-d26a4b2c8a4b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.079543] env[68282]: DEBUG oslo_vmware.api [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071831} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.081124] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1532.081432] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1532.081751] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1532.082066] env[68282]: INFO nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1532.083923] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3ef8ee29-7df6-481d-9291-edfb14f19738 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.085865] env[68282]: DEBUG nova.compute.claims [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1532.086166] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.086488] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.111245] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1532.167900] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1532.231934] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1532.232172] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1532.442174] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1811a9c2-3166-4b1c-8c23-cd484dc7d7a7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.449560] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d8b9a0-353a-440c-8724-2644c199fab1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.479634] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64be0895-1191-460c-b9c4-4362f77f10e7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.486205] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8f4856-193b-4a35-8ef0-795f541b9a3a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.498791] env[68282]: DEBUG nova.compute.provider_tree [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.506982] env[68282]: DEBUG nova.scheduler.client.report [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1532.521112] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.435s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.521636] env[68282]: ERROR nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1532.521636] env[68282]: Faults: ['InvalidArgument'] [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Traceback (most recent call last): [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1532.521636] 
env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] self.driver.spawn(context, instance, image_meta, [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] self._fetch_image_if_missing(context, vi) [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] image_cache(vi, tmp_image_ds_loc) [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] vm_util.copy_virtual_disk( [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] session._wait_for_task(vmdk_copy_task) [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] return self.wait_for_task(task_ref) [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] return evt.wait() [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] result = hub.switch() [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] return self.greenlet.switch() [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] self.f(*self.args, **self.kw) [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] raise exceptions.translate_fault(task_info.error) [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Faults: ['InvalidArgument'] [ 1532.521636] env[68282]: ERROR nova.compute.manager [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] [ 1532.522443] env[68282]: DEBUG nova.compute.utils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1532.525805] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Build of instance eeb7149b-8d07-4968-9089-d6278c4565e5 was re-scheduled: A specified parameter was not correct: fileType [ 1532.525805] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1532.525805] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1532.525805] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1532.525805] env[68282]: DEBUG nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1532.525805] env[68282]: DEBUG nova.network.neutron [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1532.809440] env[68282]: DEBUG nova.network.neutron [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.822987] env[68282]: INFO nova.compute.manager [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Took 0.30 seconds to deallocate network for instance. [ 1532.933144] env[68282]: INFO nova.scheduler.client.report [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleted allocations for instance eeb7149b-8d07-4968-9089-d6278c4565e5 [ 1532.957958] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2ed8677d-d9b5-4bd7-a560-f60f0d1482fd tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 595.341s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.959149] env[68282]: DEBUG oslo_concurrency.lockutils [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 398.197s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.959451] env[68282]: DEBUG oslo_concurrency.lockutils [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "eeb7149b-8d07-4968-9089-d6278c4565e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.959574] env[68282]: DEBUG oslo_concurrency.lockutils [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.959740] env[68282]: DEBUG oslo_concurrency.lockutils [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.963734] env[68282]: INFO nova.compute.manager [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Terminating instance [ 1532.965450] env[68282]: DEBUG nova.compute.manager [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1532.965662] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1532.965918] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-253bffbd-f5bc-4612-a3b0-70488037d182 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.969181] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1532.975827] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b40a1a9-bfa6-47f6-a481-2ac8806d944d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.005755] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eeb7149b-8d07-4968-9089-d6278c4565e5 could not be found. 
[ 1533.006017] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1533.006204] env[68282]: INFO nova.compute.manager [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1533.006788] env[68282]: DEBUG oslo.service.loopingcall [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1533.007431] env[68282]: DEBUG nova.compute.manager [-] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1533.007431] env[68282]: DEBUG nova.network.neutron [-] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1533.028239] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.028482] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.029937] env[68282]: INFO nova.compute.claims [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1533.043031] env[68282]: DEBUG nova.network.neutron [-] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.057889] env[68282]: INFO nova.compute.manager [-] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] Took 0.05 seconds to deallocate network for instance. 
[ 1533.144551] env[68282]: DEBUG oslo_concurrency.lockutils [None req-81c58984-9738-440e-8751-46001bc89d14 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.145415] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 249.547s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.145666] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: eeb7149b-8d07-4968-9089-d6278c4565e5] During sync_power_state the instance has a pending task (deleting). Skip. [ 1533.145873] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "eeb7149b-8d07-4968-9089-d6278c4565e5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.324087] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f02d9b-b5ab-4d4f-915d-df2c3cd49d65 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.331795] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7eade21-f17a-4714-af34-7a6a8dad8645 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.361894] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6253fd-48a2-41ef-9843-a5d894aae4d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.369641] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f34d63-9f53-43c2-bb04-2b1547420c2f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.383026] env[68282]: DEBUG nova.compute.provider_tree [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1533.391673] env[68282]: DEBUG nova.scheduler.client.report [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1533.404539] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.376s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.405015] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1533.441812] env[68282]: DEBUG nova.compute.utils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1533.443075] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1533.443249] env[68282]: DEBUG nova.network.neutron [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1533.456822] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1533.520499] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1533.534610] env[68282]: DEBUG nova.policy [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf9c8c5f105b41c4aadbf92e77d43d57', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07988ee1d4514b7ba175fc83c850140f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1533.548571] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1533.548808] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1533.548969] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1533.549168] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1533.549335] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1533.549523] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1533.549745] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1533.549909] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1533.550089] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1533.550260] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1533.550430] env[68282]: DEBUG nova.virt.hardware [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1533.551317] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab7838f-8046-4d12-a639-f6afb84b9be0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.559479] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da98d71-bd0b-4d62-b376-59d7ef3ce456 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.851498] env[68282]: DEBUG nova.network.neutron [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Successfully created port: 308d8780-b99c-4bb9-8f84-94a159e54570 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1534.777715] env[68282]: DEBUG nova.network.neutron [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Successfully updated port: 308d8780-b99c-4bb9-8f84-94a159e54570 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1534.794960] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "refresh_cache-0e8afd42-0759-41c0-892a-c4f852d5d3e4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.795124] env[68282]: DEBUG 
oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquired lock "refresh_cache-0e8afd42-0759-41c0-892a-c4f852d5d3e4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.795273] env[68282]: DEBUG nova.network.neutron [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1534.838465] env[68282]: DEBUG nova.network.neutron [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1534.880574] env[68282]: DEBUG nova.compute.manager [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Received event network-vif-plugged-308d8780-b99c-4bb9-8f84-94a159e54570 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1534.880788] env[68282]: DEBUG oslo_concurrency.lockutils [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] Acquiring lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.880990] env[68282]: DEBUG oslo_concurrency.lockutils [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.882017] env[68282]: DEBUG oslo_concurrency.lockutils [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.882017] env[68282]: DEBUG nova.compute.manager [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] No waiting events found dispatching network-vif-plugged-308d8780-b99c-4bb9-8f84-94a159e54570 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1534.882017] env[68282]: WARNING nova.compute.manager [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Received unexpected event network-vif-plugged-308d8780-b99c-4bb9-8f84-94a159e54570 for instance with vm_state building and task_state spawning. 
[ 1534.882017] env[68282]: DEBUG nova.compute.manager [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Received event network-changed-308d8780-b99c-4bb9-8f84-94a159e54570 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1534.882017] env[68282]: DEBUG nova.compute.manager [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Refreshing instance network info cache due to event network-changed-308d8780-b99c-4bb9-8f84-94a159e54570. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1534.882017] env[68282]: DEBUG oslo_concurrency.lockutils [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] Acquiring lock "refresh_cache-0e8afd42-0759-41c0-892a-c4f852d5d3e4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.057373] env[68282]: DEBUG nova.network.neutron [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Updating instance_info_cache with network_info: [{"id": "308d8780-b99c-4bb9-8f84-94a159e54570", "address": "fa:16:3e:c6:56:37", "network": {"id": "a0cf5819-e22b-44ec-a02b-44d06e7cac3f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-877082743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07988ee1d4514b7ba175fc83c850140f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308d8780-b9", "ovs_interfaceid": "308d8780-b99c-4bb9-8f84-94a159e54570", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.071092] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Releasing lock "refresh_cache-0e8afd42-0759-41c0-892a-c4f852d5d3e4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.071407] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Instance network_info: |[{"id": "308d8780-b99c-4bb9-8f84-94a159e54570", "address": "fa:16:3e:c6:56:37", "network": {"id": "a0cf5819-e22b-44ec-a02b-44d06e7cac3f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-877082743-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07988ee1d4514b7ba175fc83c850140f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308d8780-b9", "ovs_interfaceid": "308d8780-b99c-4bb9-8f84-94a159e54570", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1535.071703] env[68282]: DEBUG oslo_concurrency.lockutils [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] Acquired lock "refresh_cache-0e8afd42-0759-41c0-892a-c4f852d5d3e4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.071882] env[68282]: DEBUG nova.network.neutron [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Refreshing network info cache for port 308d8780-b99c-4bb9-8f84-94a159e54570 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1535.073027] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:56:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '308d8780-b99c-4bb9-8f84-94a159e54570', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1535.080829] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Creating folder: Project (07988ee1d4514b7ba175fc83c850140f). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1535.083613] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-698fbbad-2611-4287-9954-0e72eb3b9df5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.094260] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Created folder: Project (07988ee1d4514b7ba175fc83c850140f) in parent group-v693573. 
[ 1535.094452] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Creating folder: Instances. Parent ref: group-v693654. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1535.094674] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab57c28c-7d5e-4d8e-a4fe-b6231384e687 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.103483] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Created folder: Instances in parent group-v693654. [ 1535.103723] env[68282]: DEBUG oslo.service.loopingcall [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1535.103899] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1535.104099] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9f4c074-e3a0-48f7-a887-a72ea52e507c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.122829] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1535.122829] env[68282]: value = "task-3470566" [ 1535.122829] env[68282]: _type = "Task" [ 1535.122829] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.132441] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470566, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.466123] env[68282]: DEBUG nova.network.neutron [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Updated VIF entry in instance network info cache for port 308d8780-b99c-4bb9-8f84-94a159e54570. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1535.466512] env[68282]: DEBUG nova.network.neutron [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Updating instance_info_cache with network_info: [{"id": "308d8780-b99c-4bb9-8f84-94a159e54570", "address": "fa:16:3e:c6:56:37", "network": {"id": "a0cf5819-e22b-44ec-a02b-44d06e7cac3f", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-877082743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07988ee1d4514b7ba175fc83c850140f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap308d8780-b9", "ovs_interfaceid": "308d8780-b99c-4bb9-8f84-94a159e54570", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.476805] env[68282]: DEBUG oslo_concurrency.lockutils [req-7ac94dec-3456-4d53-9c4b-dd04c732a891 req-027f19d9-f14b-4b6e-94e5-b4c67a2bf0a7 service nova] Releasing lock "refresh_cache-0e8afd42-0759-41c0-892a-c4f852d5d3e4" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.635946] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470566, 'name': CreateVM_Task, 'duration_secs': 0.374984} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.636190] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1535.636882] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.637105] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.637462] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1535.637746] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b543597-36e0-46d0-bb31-a3d3b6d89322 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.642416] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Waiting for the task: (returnval){ [ 1535.642416] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]524b3b99-dcfb-e4ec-50f9-184538444224" [ 1535.642416] env[68282]: _type = "Task" [ 1535.642416] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.650093] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]524b3b99-dcfb-e4ec-50f9-184538444224, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.153080] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.153382] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1536.153471] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.643181] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.560787] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "5c2d229f-e14c-43b8-80d1-9232557b7520" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.561097] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.880687] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.880993] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.087615] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1558.100937] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1558.113072] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.113309] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.113484] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.113639] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1558.114758] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f5b33d-48c4-45f2-b147-b2635a87088c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.123641] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64deb383-1bb8-4fd9-9ed1-d63c5917cb30 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.138782] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6ef25a-8251-4e05-8255-6fed127de4fc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.144706] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a069dee9-c801-4e10-86bd-7c794a9418d2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.172623] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180906MB free_disk=94GB free_vcpus=48 pci_devices=None 
{{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1558.172769] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.172955] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.244056] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.244228] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.244357] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.244482] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.244601] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.244719] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.244836] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.244974] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.245105] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.245223] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1558.255682] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e0393ce7-f7d9-470d-8941-4a0ef876202d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.265585] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.274987] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 3de5e306-7c39-4204-b957-f7c3a97e1c3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.284162] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6b2315d5-4134-4be5-b1ce-6b9941b33493 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.293091] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.301948] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.311084] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1558.311586] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1558.311586] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1558.520712] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b972143d-f935-4007-982f-c5f3e2ba55b8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.528371] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2510fcb6-f115-491d-a671-301f196b1cc5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.558984] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1290de-b290-4f82-af99-f256c2fe5268 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.566372] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ae8041-54b5-467a-aabf-c39dc03176f7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.579580] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 
1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1558.587729] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1558.603536] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1558.603687] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.431s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.087981] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1559.088176] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11263}} [ 1559.097873] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] There are 0 instances to clean {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11272}} [ 1563.097622] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.097924] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1565.084591] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.087239] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.087399] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1565.087526] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1565.111861] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.116085] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.116317] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.116459] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.116592] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.116722] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.116882] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.117047] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.117152] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.117272] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1565.117394] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1565.117931] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.087716] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.087716] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.088062] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.087605] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1569.088043] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1569.088043] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances with incomplete migration {{(pid=68282) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11301}} [ 1576.032543] env[68282]: DEBUG oslo_concurrency.lockutils [None req-02622c8c-6d34-4226-999f-75f1a4cdaafe tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring 
lock "8a88813d-7a06-45e0-ae16-b98807cb89c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.032811] env[68282]: DEBUG oslo_concurrency.lockutils [None req-02622c8c-6d34-4226-999f-75f1a4cdaafe tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "8a88813d-7a06-45e0-ae16-b98807cb89c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.623554] env[68282]: WARNING oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1577.623554] env[68282]: ERROR oslo_vmware.rw_handles [ 1577.624227] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1577.626021] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1577.626276] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Copying Virtual Disk [datastore2] vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] 
vmware_temp/b04ec85c-ce82-41b4-839d-46ce870f9741/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1577.626562] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3357bcdb-23db-442b-812e-d0aaad5cd7df {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.634868] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1577.634868] env[68282]: value = "task-3470567" [ 1577.634868] env[68282]: _type = "Task" [ 1577.634868] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.642277] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470567, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.147076] env[68282]: DEBUG oslo_vmware.exceptions [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1578.147430] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.148096] env[68282]: ERROR nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1578.148096] env[68282]: Faults: ['InvalidArgument'] [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Traceback (most recent call last): [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] yield resources [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] self.driver.spawn(context, instance, image_meta, [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 
1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] self._fetch_image_if_missing(context, vi) [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] image_cache(vi, tmp_image_ds_loc) [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] vm_util.copy_virtual_disk( [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] session._wait_for_task(vmdk_copy_task) [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] return self.wait_for_task(task_ref) [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] return evt.wait() [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] result = hub.switch() [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] return self.greenlet.switch() [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] self.f(*self.args, **self.kw) [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] raise exceptions.translate_fault(task_info.error) [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: 
a1676f73-3871-4f59-8440-3ccb27a9a7b9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Faults: ['InvalidArgument'] [ 1578.148096] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] [ 1578.149582] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Terminating instance [ 1578.150162] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.150392] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1578.150661] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87e7871b-6a10-4d43-9605-77d69cd37dc0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.153042] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1578.153254] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1578.154059] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96b44c4-dbd6-42a1-aafe-c49ff2bdd982 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.161589] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1578.162645] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27ccb02f-3f1b-488c-838b-02a0fcbb8214 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.164174] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1578.164354] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1578.165024] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3092b783-0f29-4989-a3a1-9217898de1d8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.170102] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1578.170102] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52fe4d0a-ef78-a3bd-9596-d5c4b322f221" [ 1578.170102] env[68282]: _type = "Task" [ 1578.170102] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.177716] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52fe4d0a-ef78-a3bd-9596-d5c4b322f221, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.230904] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1578.231151] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1578.231384] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleting the datastore file [datastore2] a1676f73-3871-4f59-8440-3ccb27a9a7b9 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1578.231706] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cd61923-040b-4307-b9ea-6deba9db0d3f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.237188] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1578.237188] env[68282]: value = "task-3470569" [ 1578.237188] env[68282]: _type = "Task" [ 1578.237188] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.244442] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470569, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.679348] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1578.679699] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating directory with path [datastore2] vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1578.679829] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5869166-8b91-4bce-8781-06163eb86a71 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.690686] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Created directory with path [datastore2] vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1578.690869] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Fetch image to [datastore2] vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1578.691056] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1578.691743] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f25787-1d47-47b0-b3af-f5fffc94277d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.697924] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af39346-1144-4437-bc2a-2b2db029b3bf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.706510] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad379a9-c3ee-4d26-9450-489396eabc16 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.736609] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e55acad8-32c0-442e-a88c-2e3c0eade886 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.746402] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470569, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077718} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.747772] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1578.747960] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1578.748153] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1578.748330] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Took 0.60 seconds to destroy the instance on the hypervisor. 
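The DeleteDatastoreFile_Task entry just above, like the SearchDatastore_Task and CopyVirtualDisk_Task calls elsewhere in this log, follows the usual oslo.vmware pattern: invoke an asynchronous vCenter task through the API session, then block in wait_for_task(), which emits the "Task: {...} progress is N%" poll lines and raises a VimFaultException (such as the fileType/InvalidArgument fault seen here) if the task fails. A minimal stand-alone sketch of that pattern, assuming placeholder vCenter credentials and a hypothetical datastore path rather than values taken from this log:

    from oslo_vmware import api

    # Placeholder connection details -- illustrative assumptions only;
    # constructing the session logs in to vCenter immediately.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vim = session.vim
    file_manager = vim.service_content.fileManager
    datacenter_ref = None  # placeholder: supply the Datacenter managed object ref

    # Start the asynchronous delete task, then poll it to completion.
    task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] example-instance-dir',  # hypothetical path
        datacenter=datacenter_ref)
    session.wait_for_task(task)  # produces the "progress is ..." DEBUG lines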
[ 1578.750060] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7cb3f412-d882-4c3c-ab91-ecbc1031a4a1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.751848] env[68282]: DEBUG nova.compute.claims [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1578.752039] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.752257] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.774324] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1578.830393] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1578.892466] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1578.892651] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1579.055616] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc69430-5b73-433d-9fac-b8ba59dd88e7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.062753] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0aa3e7-6e5f-4500-a11b-1ebd3ec042ec {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.092104] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638d211c-c343-4ef9-8547-2bac099160ff {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.098947] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cf4731-5dd2-4dcb-b46f-cfe7316930d7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.111996] env[68282]: DEBUG nova.compute.provider_tree [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1579.120809] env[68282]: DEBUG nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1579.134603] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.382s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.135112] env[68282]: ERROR nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1579.135112] env[68282]: Faults: ['InvalidArgument'] [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Traceback (most recent call last): [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] self.driver.spawn(context, instance, image_meta, [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] self._fetch_image_if_missing(context, vi) [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] image_cache(vi, tmp_image_ds_loc) [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] vm_util.copy_virtual_disk( [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] session._wait_for_task(vmdk_copy_task) [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] return self.wait_for_task(task_ref) [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] return evt.wait() [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] result = hub.switch() [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] return self.greenlet.switch() [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] self.f(*self.args, **self.kw) [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: 
a1676f73-3871-4f59-8440-3ccb27a9a7b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] raise exceptions.translate_fault(task_info.error) [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Faults: ['InvalidArgument'] [ 1579.135112] env[68282]: ERROR nova.compute.manager [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] [ 1579.135872] env[68282]: DEBUG nova.compute.utils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1579.137262] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Build of instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 was re-scheduled: A specified parameter was not correct: fileType [ 1579.137262] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1579.137621] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1579.137792] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1579.138446] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1579.138653] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1579.400272] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.412888] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Took 0.27 seconds to deallocate network for instance. [ 1579.511050] env[68282]: INFO nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleted allocations for instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 [ 1579.536946] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 458.384s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.538143] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 295.939s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.538357] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] During sync_power_state the instance has a pending task (spawning). Skip. 
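The long runs of 'Acquiring lock "..." by "..."', 'acquired ... waited', and '"released" ... held' messages in this stretch (for "compute_resources", the instance UUID, and the "-events" locks) are emitted by oslo.concurrency's synchronized decorator, which serializes the named critical section and logs how long each caller waited for and held the lock. A minimal sketch of that mechanism, using an illustrative function name rather than anything from Nova's source:

    import time

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')  # same lock name as in the log
    def update_resource_tracker():
        # Only one caller at a time enters this block; the decorator's wrapper
        # DEBUG-logs the "acquired ... waited" line on entry and the
        # "released ... held" line on exit (lockutils.py:407 / :421 above).
        time.sleep(0.1)

    update_resource_tracker()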
[ 1579.538542] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.539176] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 261.738s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.539395] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.539598] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.539760] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.541671] env[68282]: INFO nova.compute.manager [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Terminating instance [ 1579.543472] env[68282]: DEBUG nova.compute.manager [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1579.543669] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1579.544015] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3787dc82-3e54-4cf8-b96c-1ae1cebd86ff {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.555391] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ff0465-48e2-4294-9d82-a6675bbec7f0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.565328] env[68282]: DEBUG nova.compute.manager [None req-eaa94254-3b7c-4e01-8db3-20f5ee68ca38 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 56b8d673-b69b-4f07-9c35-12c9390ed505] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1579.585630] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a1676f73-3871-4f59-8440-3ccb27a9a7b9 could not be found. [ 1579.585801] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1579.586020] env[68282]: INFO nova.compute.manager [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1579.586387] env[68282]: DEBUG oslo.service.loopingcall [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1579.586622] env[68282]: DEBUG nova.compute.manager [-] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1579.586719] env[68282]: DEBUG nova.network.neutron [-] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1579.596346] env[68282]: DEBUG nova.compute.manager [None req-eaa94254-3b7c-4e01-8db3-20f5ee68ca38 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 56b8d673-b69b-4f07-9c35-12c9390ed505] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1579.617133] env[68282]: DEBUG nova.network.neutron [-] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.629729] env[68282]: INFO nova.compute.manager [-] [instance: a1676f73-3871-4f59-8440-3ccb27a9a7b9] Took 0.04 seconds to deallocate network for instance. [ 1579.635456] env[68282]: DEBUG oslo_concurrency.lockutils [None req-eaa94254-3b7c-4e01-8db3-20f5ee68ca38 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "56b8d673-b69b-4f07-9c35-12c9390ed505" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.891s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.643959] env[68282]: DEBUG nova.compute.manager [None req-ac5909c8-cfb0-4df4-8e21-808adb7557e8 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 2f617973-033c-42e5-8451-2a565291d7c8] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1579.677126] env[68282]: DEBUG nova.compute.manager [None req-ac5909c8-cfb0-4df4-8e21-808adb7557e8 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 2f617973-033c-42e5-8451-2a565291d7c8] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1579.702298] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ac5909c8-cfb0-4df4-8e21-808adb7557e8 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "2f617973-033c-42e5-8451-2a565291d7c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.187s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.714832] env[68282]: DEBUG nova.compute.manager [None req-83809424-dbe7-4a75-af59-46874a52cc7d tempest-ServersNegativeTestMultiTenantJSON-1282295197 tempest-ServersNegativeTestMultiTenantJSON-1282295197-project-member] [instance: 2d3581e8-19f6-4665-9b3c-a89dfdefb166] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1579.739552] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9553000f-cf1d-4c40-bb31-70873b7055fa tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "a1676f73-3871-4f59-8440-3ccb27a9a7b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.200s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.742426] env[68282]: DEBUG nova.compute.manager [None req-83809424-dbe7-4a75-af59-46874a52cc7d tempest-ServersNegativeTestMultiTenantJSON-1282295197 tempest-ServersNegativeTestMultiTenantJSON-1282295197-project-member] [instance: 2d3581e8-19f6-4665-9b3c-a89dfdefb166] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1579.763994] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83809424-dbe7-4a75-af59-46874a52cc7d tempest-ServersNegativeTestMultiTenantJSON-1282295197 tempest-ServersNegativeTestMultiTenantJSON-1282295197-project-member] Lock "2d3581e8-19f6-4665-9b3c-a89dfdefb166" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.412s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.772762] env[68282]: DEBUG nova.compute.manager [None req-fc904dc4-8fbc-47d5-a3dd-3d2d2ed24e19 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 567cf5f0-3e42-4f75-8b8d-978220c161d0] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1579.795185] env[68282]: DEBUG nova.compute.manager [None req-fc904dc4-8fbc-47d5-a3dd-3d2d2ed24e19 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 567cf5f0-3e42-4f75-8b8d-978220c161d0] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1579.817439] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fc904dc4-8fbc-47d5-a3dd-3d2d2ed24e19 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "567cf5f0-3e42-4f75-8b8d-978220c161d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.072s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.826652] env[68282]: DEBUG nova.compute.manager [None req-359d61b7-53ef-4095-92c2-958fe87633ad tempest-ServerRescueTestJSONUnderV235-1077845589 tempest-ServerRescueTestJSONUnderV235-1077845589-project-member] [instance: b112af96-eff4-4b26-9161-deb9ab41afb8] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1579.850367] env[68282]: DEBUG nova.compute.manager [None req-359d61b7-53ef-4095-92c2-958fe87633ad tempest-ServerRescueTestJSONUnderV235-1077845589 tempest-ServerRescueTestJSONUnderV235-1077845589-project-member] [instance: b112af96-eff4-4b26-9161-deb9ab41afb8] Instance disappeared before build. 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1579.871880] env[68282]: DEBUG oslo_concurrency.lockutils [None req-359d61b7-53ef-4095-92c2-958fe87633ad tempest-ServerRescueTestJSONUnderV235-1077845589 tempest-ServerRescueTestJSONUnderV235-1077845589-project-member] Lock "b112af96-eff4-4b26-9161-deb9ab41afb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.606s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.881410] env[68282]: DEBUG nova.compute.manager [None req-4f7d6e8c-2388-4a18-aaa3-7469cebe4681 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] [instance: 18588d7f-1748-4e42-b91a-83edda89e6ba] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1579.905323] env[68282]: DEBUG nova.compute.manager [None req-4f7d6e8c-2388-4a18-aaa3-7469cebe4681 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] [instance: 18588d7f-1748-4e42-b91a-83edda89e6ba] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1579.926278] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4f7d6e8c-2388-4a18-aaa3-7469cebe4681 tempest-AttachVolumeShelveTestJSON-1897179510 tempest-AttachVolumeShelveTestJSON-1897179510-project-member] Lock "18588d7f-1748-4e42-b91a-83edda89e6ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.488s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.935044] env[68282]: DEBUG nova.compute.manager [None req-1d73ac8f-5ef4-415d-aa76-cca32895dfc8 tempest-ServerAddressesTestJSON-877632188 tempest-ServerAddressesTestJSON-877632188-project-member] [instance: e0393ce7-f7d9-470d-8941-4a0ef876202d] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1579.962121] env[68282]: DEBUG nova.compute.manager [None req-1d73ac8f-5ef4-415d-aa76-cca32895dfc8 tempest-ServerAddressesTestJSON-877632188 tempest-ServerAddressesTestJSON-877632188-project-member] [instance: e0393ce7-f7d9-470d-8941-4a0ef876202d] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1579.983361] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1d73ac8f-5ef4-415d-aa76-cca32895dfc8 tempest-ServerAddressesTestJSON-877632188 tempest-ServerAddressesTestJSON-877632188-project-member] Lock "e0393ce7-f7d9-470d-8941-4a0ef876202d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.963s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.993900] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1580.038181] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.038438] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.039855] env[68282]: INFO nova.compute.claims [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1580.269895] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf06ed8-c9af-4ac8-8b79-4f7f2249c50c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.277710] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddcb58ad-b997-4e93-bdc2-8bac233af75d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.309065] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a0915d-f412-4850-92fa-bc72c78f12dc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.315767] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa34a565-2e23-4d72-ba19-cb93c09b34d5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.329825] env[68282]: DEBUG nova.compute.provider_tree [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.338658] env[68282]: DEBUG nova.scheduler.client.report [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1580.352345] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 
tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.314s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.352784] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1580.392331] env[68282]: DEBUG nova.compute.utils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1580.393505] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1580.393684] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1580.402448] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1580.467176] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1580.472728] env[68282]: DEBUG nova.policy [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91a64b1222fc43e1bdd56de560df3715', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5535c9ace9e14b478a347e1de217c943', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1580.491798] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1580.492055] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1580.492230] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1580.492419] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1580.492571] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1580.492721] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1580.492927] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 
tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1580.493103] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1580.493276] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1580.493441] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1580.493615] env[68282]: DEBUG nova.virt.hardware [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1580.494505] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b5c006-600e-4b39-ae1f-bc620916de4f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.502042] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cfa3a8-86d8-4106-8e94-6b3ffc448fab {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.911580] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Successfully created port: de75c58a-b6d0-4fb8-8670-93cf0603cf8b {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1581.604133] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Successfully created port: 5515c546-f8f8-4a17-9cab-4900501ed194 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1582.341798] env[68282]: DEBUG nova.compute.manager [req-2bfbde99-3682-4666-9d5f-52d1e25ba2b4 req-364217b1-d82d-43bf-8724-a7021961d76e service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Received event network-vif-plugged-de75c58a-b6d0-4fb8-8670-93cf0603cf8b {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1582.342122] env[68282]: DEBUG oslo_concurrency.lockutils [req-2bfbde99-3682-4666-9d5f-52d1e25ba2b4 req-364217b1-d82d-43bf-8724-a7021961d76e service nova] Acquiring lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.342298] env[68282]: DEBUG oslo_concurrency.lockutils [req-2bfbde99-3682-4666-9d5f-52d1e25ba2b4 req-364217b1-d82d-43bf-8724-a7021961d76e service nova] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.342480] env[68282]: DEBUG oslo_concurrency.lockutils [req-2bfbde99-3682-4666-9d5f-52d1e25ba2b4 req-364217b1-d82d-43bf-8724-a7021961d76e service nova] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.342652] env[68282]: DEBUG nova.compute.manager [req-2bfbde99-3682-4666-9d5f-52d1e25ba2b4 req-364217b1-d82d-43bf-8724-a7021961d76e service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] No waiting events found dispatching network-vif-plugged-de75c58a-b6d0-4fb8-8670-93cf0603cf8b {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1582.342815] env[68282]: WARNING nova.compute.manager [req-2bfbde99-3682-4666-9d5f-52d1e25ba2b4 req-364217b1-d82d-43bf-8724-a7021961d76e service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Received unexpected event network-vif-plugged-de75c58a-b6d0-4fb8-8670-93cf0603cf8b for instance with vm_state building and task_state spawning. [ 1582.445533] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Successfully updated port: de75c58a-b6d0-4fb8-8670-93cf0603cf8b {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1583.190395] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Successfully updated port: 5515c546-f8f8-4a17-9cab-4900501ed194 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1583.199425] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.199699] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquired lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.199863] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Building network info cache for instance {{(pid=68282) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1583.279800] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1584.036555] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Updating instance_info_cache with network_info: [{"id": "de75c58a-b6d0-4fb8-8670-93cf0603cf8b", "address": "fa:16:3e:d8:48:47", "network": {"id": "0fc7d415-2652-4c13-8108-0884fcc9f936", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1342760396", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.192", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5535c9ace9e14b478a347e1de217c943", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde75c58a-b6", "ovs_interfaceid": "de75c58a-b6d0-4fb8-8670-93cf0603cf8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5515c546-f8f8-4a17-9cab-4900501ed194", "address": "fa:16:3e:ad:b9:b1", "network": {"id": "e93c3a24-e0b3-4d10-ba97-3d681dde919d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1634036726", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5535c9ace9e14b478a347e1de217c943", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5515c546-f8", "ovs_interfaceid": "5515c546-f8f8-4a17-9cab-4900501ed194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.048500] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Releasing lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.048779] env[68282]: DEBUG 
nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Instance network_info: |[{"id": "de75c58a-b6d0-4fb8-8670-93cf0603cf8b", "address": "fa:16:3e:d8:48:47", "network": {"id": "0fc7d415-2652-4c13-8108-0884fcc9f936", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1342760396", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.192", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5535c9ace9e14b478a347e1de217c943", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde75c58a-b6", "ovs_interfaceid": "de75c58a-b6d0-4fb8-8670-93cf0603cf8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5515c546-f8f8-4a17-9cab-4900501ed194", "address": "fa:16:3e:ad:b9:b1", "network": {"id": "e93c3a24-e0b3-4d10-ba97-3d681dde919d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1634036726", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5535c9ace9e14b478a347e1de217c943", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5515c546-f8", "ovs_interfaceid": "5515c546-f8f8-4a17-9cab-4900501ed194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1584.049221] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:48:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0c293d47-74c0-49d7-a474-cdb643080f6f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de75c58a-b6d0-4fb8-8670-93cf0603cf8b', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:b9:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5515c546-f8f8-4a17-9cab-4900501ed194', 'vif_model': 'vmxnet3'}] {{(pid=68282) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1584.058899] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Creating folder: Project (5535c9ace9e14b478a347e1de217c943). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1584.059751] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94425a99-fa56-4de0-bd9f-862d7838fd91 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.070446] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Created folder: Project (5535c9ace9e14b478a347e1de217c943) in parent group-v693573. [ 1584.070659] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Creating folder: Instances. Parent ref: group-v693657. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1584.070879] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37712ecf-3cab-408b-bd5c-bb4ce98799e9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.079152] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Created folder: Instances in parent group-v693657. [ 1584.079379] env[68282]: DEBUG oslo.service.loopingcall [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1584.079546] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1584.079727] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bf63f4a-ae73-4d4b-99d6-6295daf0f408 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.100073] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1584.100073] env[68282]: value = "task-3470572" [ 1584.100073] env[68282]: _type = "Task" [ 1584.100073] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.109625] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470572, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.387781] env[68282]: DEBUG nova.compute.manager [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Received event network-changed-de75c58a-b6d0-4fb8-8670-93cf0603cf8b {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1584.388110] env[68282]: DEBUG nova.compute.manager [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Refreshing instance network info cache due to event network-changed-de75c58a-b6d0-4fb8-8670-93cf0603cf8b. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1584.388411] env[68282]: DEBUG oslo_concurrency.lockutils [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Acquiring lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.388603] env[68282]: DEBUG oslo_concurrency.lockutils [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Acquired lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.388784] env[68282]: DEBUG nova.network.neutron [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Refreshing network info cache for port de75c58a-b6d0-4fb8-8670-93cf0603cf8b {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1584.609322] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470572, 'name': CreateVM_Task, 'duration_secs': 0.33877} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.610056] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1584.610788] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.610788] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.610893] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1584.611118] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9901dd9-0d99-4d10-b5e7-9420144b6ab6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.615078] env[68282]: DEBUG oslo_vmware.api [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Waiting for the task: (returnval){ [ 1584.615078] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5234bb8d-0c0f-5c79-6c75-5969d18d8c0d" [ 1584.615078] env[68282]: _type = "Task" [ 1584.615078] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.622172] env[68282]: DEBUG oslo_vmware.api [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5234bb8d-0c0f-5c79-6c75-5969d18d8c0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.778493] env[68282]: DEBUG nova.network.neutron [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Updated VIF entry in instance network info cache for port de75c58a-b6d0-4fb8-8670-93cf0603cf8b. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1584.778961] env[68282]: DEBUG nova.network.neutron [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Updating instance_info_cache with network_info: [{"id": "de75c58a-b6d0-4fb8-8670-93cf0603cf8b", "address": "fa:16:3e:d8:48:47", "network": {"id": "0fc7d415-2652-4c13-8108-0884fcc9f936", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1342760396", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.192", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5535c9ace9e14b478a347e1de217c943", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde75c58a-b6", "ovs_interfaceid": "de75c58a-b6d0-4fb8-8670-93cf0603cf8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5515c546-f8f8-4a17-9cab-4900501ed194", "address": "fa:16:3e:ad:b9:b1", "network": {"id": "e93c3a24-e0b3-4d10-ba97-3d681dde919d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1634036726", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5535c9ace9e14b478a347e1de217c943", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5515c546-f8", "ovs_interfaceid": "5515c546-f8f8-4a17-9cab-4900501ed194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.791680] env[68282]: DEBUG oslo_concurrency.lockutils [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Releasing lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1584.791918] env[68282]: DEBUG nova.compute.manager [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Received event network-vif-plugged-5515c546-f8f8-4a17-9cab-4900501ed194 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1584.793083] env[68282]: DEBUG oslo_concurrency.lockutils 
[req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Acquiring lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.793083] env[68282]: DEBUG oslo_concurrency.lockutils [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.793083] env[68282]: DEBUG oslo_concurrency.lockutils [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.793083] env[68282]: DEBUG nova.compute.manager [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] No waiting events found dispatching network-vif-plugged-5515c546-f8f8-4a17-9cab-4900501ed194 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1584.793083] env[68282]: WARNING nova.compute.manager [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Received unexpected event network-vif-plugged-5515c546-f8f8-4a17-9cab-4900501ed194 for instance with vm_state building and task_state spawning. [ 1584.793083] env[68282]: DEBUG nova.compute.manager [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Received event network-changed-5515c546-f8f8-4a17-9cab-4900501ed194 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1584.793307] env[68282]: DEBUG nova.compute.manager [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Refreshing instance network info cache due to event network-changed-5515c546-f8f8-4a17-9cab-4900501ed194. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1584.793353] env[68282]: DEBUG oslo_concurrency.lockutils [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Acquiring lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.793454] env[68282]: DEBUG oslo_concurrency.lockutils [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Acquired lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.793612] env[68282]: DEBUG nova.network.neutron [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Refreshing network info cache for port 5515c546-f8f8-4a17-9cab-4900501ed194 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1584.915078] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.125101] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.125463] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1585.125571] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.156188] env[68282]: DEBUG nova.network.neutron [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Updated VIF entry in instance network info cache for port 5515c546-f8f8-4a17-9cab-4900501ed194. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1585.156610] env[68282]: DEBUG nova.network.neutron [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Updating instance_info_cache with network_info: [{"id": "de75c58a-b6d0-4fb8-8670-93cf0603cf8b", "address": "fa:16:3e:d8:48:47", "network": {"id": "0fc7d415-2652-4c13-8108-0884fcc9f936", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1342760396", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.192", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5535c9ace9e14b478a347e1de217c943", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0c293d47-74c0-49d7-a474-cdb643080f6f", "external-id": "nsx-vlan-transportzone-172", "segmentation_id": 172, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde75c58a-b6", "ovs_interfaceid": "de75c58a-b6d0-4fb8-8670-93cf0603cf8b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5515c546-f8f8-4a17-9cab-4900501ed194", "address": "fa:16:3e:ad:b9:b1", "network": {"id": "e93c3a24-e0b3-4d10-ba97-3d681dde919d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1634036726", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.193", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "5535c9ace9e14b478a347e1de217c943", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cbc9b8f-ce19-4262-bf4d-88cd4f259a1c", "external-id": "nsx-vlan-transportzone-630", "segmentation_id": 630, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5515c546-f8", "ovs_interfaceid": "5515c546-f8f8-4a17-9cab-4900501ed194", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.166508] env[68282]: DEBUG oslo_concurrency.lockutils [req-2380dc6b-c21a-4eba-b1bb-617f3bf911a5 req-9da71742-4b81-4495-98e1-29dad9983ea8 service nova] Releasing lock "refresh_cache-a9d0de25-ef21-4725-a6c1-f6fac2593bb9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.729502] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "121db530-a9de-4bb9-9d5a-0a88d9587881" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1597.762418] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "aff4995e-4c8f-4ced-8743-e6cac0484875" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.762727] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.096120] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1618.107965] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.108216] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.108412] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.108570] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1618.109656] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfee1678-9c5b-40dc-8e4b-9fd6eb1f700a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.118475] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5a9774-22af-4214-89a7-c32ff45e36f4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.132184] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66303886-8c04-45a3-9b17-b981966644d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.138206] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b29cccda-146a-48eb-aeac-2884aa1735cd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.166026] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180948MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1618.166207] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.166392] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.353264] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.353435] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.353621] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.353700] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.353804] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.353924] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.354057] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.354224] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.354345] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.354460] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1618.366212] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1618.377172] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1618.389449] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1618.399026] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8a88813d-7a06-45e0-ae16-b98807cb89c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1618.408591] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1618.409503] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1618.409503] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1618.429060] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing inventories for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1618.442895] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Updating ProviderTree inventory for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1618.443098] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Updating inventory in ProviderTree for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1618.453817] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing 
aggregate associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, aggregates: None {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1618.471220] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing trait associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1618.648720] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69ed845-e5a7-452a-adca-9aed79662be0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.656325] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1519ec-7e0b-4657-857f-1a7934b1183f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.686329] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b865a3-545c-4a10-9f07-66ee0dacbd16 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.693008] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2ee9b2-0b13-4ed4-ab8f-86932620013a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.705633] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1618.714235] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1618.728773] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1618.728992] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.563s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.720603] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.720900] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1625.087872] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1625.088078] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1625.088267] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1625.108804] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.108986] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.109107] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.109242] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.109369] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.109491] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.109609] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.109726] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.109843] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.109962] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1625.110097] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1626.008966] env[68282]: WARNING oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1626.008966] env[68282]: ERROR oslo_vmware.rw_handles [ 1626.008966] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1626.010933] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1626.011146] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 
tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Copying Virtual Disk [datastore2] vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/fdf84b15-cba0-4bd9-aa9f-8d83c3f49afa/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1626.011418] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e33129e-b4f8-499c-a62c-37450ddd9fb3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.023940] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1626.023940] env[68282]: value = "task-3470573" [ 1626.023940] env[68282]: _type = "Task" [ 1626.023940] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.031319] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470573, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.104993] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1626.534355] env[68282]: DEBUG oslo_vmware.exceptions [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1626.534355] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.534870] env[68282]: ERROR nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1626.534870] env[68282]: Faults: ['InvalidArgument'] [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Traceback (most recent call last): [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] yield resources [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] self.driver.spawn(context, instance, image_meta, [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] self._fetch_image_if_missing(context, vi) [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] image_cache(vi, tmp_image_ds_loc) [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] vm_util.copy_virtual_disk( [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] session._wait_for_task(vmdk_copy_task) [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] return self.wait_for_task(task_ref) [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] return evt.wait() [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] result = hub.switch() [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] return self.greenlet.switch() [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] self.f(*self.args, **self.kw) [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] raise exceptions.translate_fault(task_info.error) [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Faults: ['InvalidArgument'] [ 1626.534870] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] [ 1626.535717] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Terminating instance [ 1626.536879] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.536996] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1626.537617] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 
tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1626.537804] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1626.538047] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4765efbc-1b1b-4954-9fd3-ccf59c3c8705 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.540412] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999f1ff0-d442-4ed6-a36e-0dec1792d948 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.547432] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1626.547622] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b54a5c5b-1bd7-4016-8c6c-e6578a9baab8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.549767] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1626.549941] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1626.550862] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6018375b-2726-4a30-a1f9-56075646da32 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.555284] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1626.555284] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52648b61-00a2-2ec0-2806-211c15d2b97b" [ 1626.555284] env[68282]: _type = "Task" [ 1626.555284] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.562297] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52648b61-00a2-2ec0-2806-211c15d2b97b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.629144] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1626.629367] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1626.629550] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleting the datastore file [datastore2] 6d7028c7-2233-4f8e-8600-bca7edb1029d {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1626.629821] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb0ff39a-874f-4352-a583-7ae4e8f72bf3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.635341] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1626.635341] env[68282]: value = "task-3470575" [ 1626.635341] env[68282]: _type = "Task" [ 1626.635341] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.642556] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470575, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.065343] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1627.065650] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating directory with path [datastore2] vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1627.065858] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-569d6caf-b3cb-4d97-9bb2-9db35ea64a85 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.076468] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Created directory with path [datastore2] vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1627.076656] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Fetch image to [datastore2] vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1627.076826] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1627.077554] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7061a45-821e-46c1-92e8-8b2ca272ac0e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.083233] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.085560] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600ec3ea-1fee-452b-8565-7ed4e7ea5354 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.094490] env[68282]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a04b50-597f-45c0-8165-35581705f5e5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.125748] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823a075d-4eb4-4311-b159-92a97ee006be {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.131360] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.131562] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.131720] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.134256] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7ce87219-74ab-45cc-92bc-c6d9a4afef1c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.146041] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470575, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074592} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.146041] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1627.146041] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1627.146259] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1627.146463] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Took 0.61 seconds to destroy the instance on the hypervisor. 
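A minimal sketch (not part of the captured log, assuming the standard oslo.vmware session API) of how the VimFaultException in the traceback above reaches the caller: wait_for_task() re-raises the vCenter task error, and the VIM fault names (here 'InvalidArgument') are exposed on the exception's fault_list.

    from oslo_vmware import exceptions as vexc

    def copy_disk_and_report(session, copy_task):
        # Hypothetical helper: wait for a CopyVirtualDisk_Task and surface the
        # VIM fault the same way the spawn path in the traceback above does.
        try:
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as exc:
            # exc.fault_list carries the VIM fault names, e.g. ['InvalidArgument'];
            # str(exc) includes the message "A specified parameter was not
            # correct: fileType" seen above.
            if 'InvalidArgument' in exc.fault_list:
                # The log shows no retry here: the build is aborted, the
                # resource claim is dropped and the instance is re-scheduled.
                raise
            raise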
[ 1627.148322] env[68282]: DEBUG nova.compute.claims [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1627.148494] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.148730] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.160776] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1627.214494] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1627.276109] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1627.276343] env[68282]: DEBUG oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1627.427239] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f983bc-b001-4eb6-9d76-a62b39c73a3d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.434794] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6151e0b1-1376-4eb9-a66f-e8515b400a45 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.464796] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d8072d-4469-4926-94ac-0fe0bb6def2d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.471826] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98a6c9f-eeda-4316-8192-98fb0d50f74a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.485107] env[68282]: DEBUG nova.compute.provider_tree [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.493731] env[68282]: DEBUG nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1627.509586] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.361s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.510147] env[68282]: ERROR nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1627.510147] env[68282]: Faults: ['InvalidArgument'] [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Traceback (most recent call last): [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] self.driver.spawn(context, instance, image_meta, [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] self._fetch_image_if_missing(context, vi) [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] image_cache(vi, tmp_image_ds_loc) [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] vm_util.copy_virtual_disk( [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] session._wait_for_task(vmdk_copy_task) [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] return self.wait_for_task(task_ref) [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] return evt.wait() [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] result = hub.switch() [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] return self.greenlet.switch() [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] self.f(*self.args, **self.kw) [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 
6d7028c7-2233-4f8e-8600-bca7edb1029d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] raise exceptions.translate_fault(task_info.error) [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Faults: ['InvalidArgument'] [ 1627.510147] env[68282]: ERROR nova.compute.manager [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] [ 1627.510997] env[68282]: DEBUG nova.compute.utils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1627.512259] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Build of instance 6d7028c7-2233-4f8e-8600-bca7edb1029d was re-scheduled: A specified parameter was not correct: fileType [ 1627.512259] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1627.512652] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1627.512839] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1627.513027] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1627.513198] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1627.849622] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.861340] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Took 0.35 seconds to deallocate network for instance. [ 1627.991351] env[68282]: INFO nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleted allocations for instance 6d7028c7-2233-4f8e-8600-bca7edb1029d [ 1628.014466] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 506.827s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.015802] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 344.416s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.016015] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] During sync_power_state the instance has a pending task (spawning). Skip. 
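The per-instance lock records above and below follow the usual oslo.concurrency pattern; a rough sketch (the lock name is the instance UUID from the log, the surrounding function is hypothetical):

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # Hypothetical stand-in: a nested function is decorated with the
        # instance UUID as the lock name, so the 'Lock "..." acquired by
        # "...do_terminate_instance" :: waited 310.321s' and '"released" ...
        # held 0.211s' DEBUG lines above come from this decorator's wrapper.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            pass  # shutdown/destroy work runs while the lock is held
        do_terminate_instance()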
[ 1628.016221] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.016917] env[68282]: DEBUG oslo_concurrency.lockutils [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 310.321s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.017658] env[68282]: DEBUG oslo_concurrency.lockutils [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "6d7028c7-2233-4f8e-8600-bca7edb1029d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.017658] env[68282]: DEBUG oslo_concurrency.lockutils [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.017658] env[68282]: DEBUG oslo_concurrency.lockutils [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.019625] env[68282]: INFO nova.compute.manager [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Terminating instance [ 1628.021192] env[68282]: DEBUG nova.compute.manager [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1628.021384] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1628.021641] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c40feec-ddc6-4089-bab3-fa7da613052a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.032279] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b17839-3b5d-4b38-9c2a-4b0ae3564747 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.044572] env[68282]: DEBUG nova.compute.manager [None req-6ef0eb15-f223-46ec-80ed-3e4d8df92310 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: 3de5e306-7c39-4204-b957-f7c3a97e1c3c] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1628.066423] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6d7028c7-2233-4f8e-8600-bca7edb1029d could not be found. [ 1628.066697] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1628.066801] env[68282]: INFO nova.compute.manager [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1628.067078] env[68282]: DEBUG oslo.service.loopingcall [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.068795] env[68282]: DEBUG nova.compute.manager [-] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1628.068795] env[68282]: DEBUG nova.network.neutron [-] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1628.071119] env[68282]: DEBUG nova.compute.manager [None req-6ef0eb15-f223-46ec-80ed-3e4d8df92310 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: 3de5e306-7c39-4204-b957-f7c3a97e1c3c] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1628.091391] env[68282]: DEBUG nova.network.neutron [-] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.093435] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6ef0eb15-f223-46ec-80ed-3e4d8df92310 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "3de5e306-7c39-4204-b957-f7c3a97e1c3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.627s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.098971] env[68282]: INFO nova.compute.manager [-] [instance: 6d7028c7-2233-4f8e-8600-bca7edb1029d] Took 0.03 seconds to deallocate network for instance. [ 1628.103604] env[68282]: DEBUG nova.compute.manager [None req-76b45784-11db-41cc-8c28-71f112e4ba6d tempest-ServersListShow296Test-1655116262 tempest-ServersListShow296Test-1655116262-project-member] [instance: 6b2315d5-4134-4be5-b1ce-6b9941b33493] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1628.126274] env[68282]: DEBUG nova.compute.manager [None req-76b45784-11db-41cc-8c28-71f112e4ba6d tempest-ServersListShow296Test-1655116262 tempest-ServersListShow296Test-1655116262-project-member] [instance: 6b2315d5-4134-4be5-b1ce-6b9941b33493] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1628.146139] env[68282]: DEBUG oslo_concurrency.lockutils [None req-76b45784-11db-41cc-8c28-71f112e4ba6d tempest-ServersListShow296Test-1655116262 tempest-ServersListShow296Test-1655116262-project-member] Lock "6b2315d5-4134-4be5-b1ce-6b9941b33493" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.719s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.157116] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1628.227731] env[68282]: DEBUG oslo_concurrency.lockutils [None req-89fb1274-b1ad-408c-887c-709be98b3e26 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "6d7028c7-2233-4f8e-8600-bca7edb1029d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.211s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.237801] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.238198] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.239621] env[68282]: INFO nova.compute.claims [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1628.443884] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc009c0-1a38-4d12-9c1e-e1ed0e099beb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.451163] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c9b80a-005a-41b0-909b-a0a99ffbc2d2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.482425] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086da6ec-c6a6-4b3a-98dd-3e778710d969 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.489982] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70caf97e-b8d9-4094-b535-32cb7d68d08a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.503130] env[68282]: DEBUG nova.compute.provider_tree [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1628.512124] env[68282]: DEBUG nova.scheduler.client.report [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1628.525995] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.288s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.526560] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1628.558535] env[68282]: DEBUG nova.compute.utils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1628.561080] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1628.561080] env[68282]: DEBUG nova.network.neutron [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1628.570690] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1628.621263] env[68282]: DEBUG nova.policy [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeff02086d114be7816a6d2558c9c8fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea2948d9c0a046a09077c014de41faeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1628.631082] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1628.656634] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1628.656877] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1628.657474] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1628.657474] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1628.657474] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1628.657640] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1628.657734] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1628.657889] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1628.658064] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Got 1 
possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1628.658226] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1628.658394] env[68282]: DEBUG nova.virt.hardware [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1628.659373] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977937ba-3fd3-4709-8809-42c1ffa6d164 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.667091] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1c2ab3-8961-4f8d-aa31-39bf4c0432d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.981452] env[68282]: DEBUG nova.network.neutron [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Successfully created port: 87981ff2-3e1c-40e8-b5bb-3188f34d032e {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1629.087348] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.088822] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.354214] env[68282]: DEBUG nova.compute.manager [req-89f2b69d-e04f-4ff9-8679-ca525dbdd41f req-d3ba87d4-257d-4b50-beec-6bc553c10e42 service nova] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Received event network-vif-plugged-87981ff2-3e1c-40e8-b5bb-3188f34d032e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1630.354492] env[68282]: DEBUG oslo_concurrency.lockutils [req-89f2b69d-e04f-4ff9-8679-ca525dbdd41f req-d3ba87d4-257d-4b50-beec-6bc553c10e42 service nova] Acquiring lock "7bc5117e-58d1-4c08-b778-7045b1076b94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.354686] env[68282]: DEBUG oslo_concurrency.lockutils [req-89f2b69d-e04f-4ff9-8679-ca525dbdd41f req-d3ba87d4-257d-4b50-beec-6bc553c10e42 service nova] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.354850] env[68282]: DEBUG oslo_concurrency.lockutils 
[req-89f2b69d-e04f-4ff9-8679-ca525dbdd41f req-d3ba87d4-257d-4b50-beec-6bc553c10e42 service nova] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.360014] env[68282]: DEBUG nova.compute.manager [req-89f2b69d-e04f-4ff9-8679-ca525dbdd41f req-d3ba87d4-257d-4b50-beec-6bc553c10e42 service nova] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] No waiting events found dispatching network-vif-plugged-87981ff2-3e1c-40e8-b5bb-3188f34d032e {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1630.360322] env[68282]: WARNING nova.compute.manager [req-89f2b69d-e04f-4ff9-8679-ca525dbdd41f req-d3ba87d4-257d-4b50-beec-6bc553c10e42 service nova] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Received unexpected event network-vif-plugged-87981ff2-3e1c-40e8-b5bb-3188f34d032e for instance with vm_state building and task_state spawning. [ 1630.579625] env[68282]: DEBUG nova.network.neutron [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Successfully updated port: 87981ff2-3e1c-40e8-b5bb-3188f34d032e {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1630.597673] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "refresh_cache-7bc5117e-58d1-4c08-b778-7045b1076b94" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.597833] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "refresh_cache-7bc5117e-58d1-4c08-b778-7045b1076b94" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.597984] env[68282]: DEBUG nova.network.neutron [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1630.684745] env[68282]: DEBUG nova.network.neutron [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1631.023270] env[68282]: DEBUG nova.network.neutron [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Updating instance_info_cache with network_info: [{"id": "87981ff2-3e1c-40e8-b5bb-3188f34d032e", "address": "fa:16:3e:52:13:69", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87981ff2-3e", "ovs_interfaceid": "87981ff2-3e1c-40e8-b5bb-3188f34d032e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.038500] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "refresh_cache-7bc5117e-58d1-4c08-b778-7045b1076b94" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.038844] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Instance network_info: |[{"id": "87981ff2-3e1c-40e8-b5bb-3188f34d032e", "address": "fa:16:3e:52:13:69", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87981ff2-3e", "ovs_interfaceid": "87981ff2-3e1c-40e8-b5bb-3188f34d032e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1631.039290] env[68282]: DEBUG 
nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:13:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87981ff2-3e1c-40e8-b5bb-3188f34d032e', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1631.047043] env[68282]: DEBUG oslo.service.loopingcall [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1631.047532] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1631.047751] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6035145c-ebc6-4a07-aeb1-5caa6d598f3b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.068213] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1631.068213] env[68282]: value = "task-3470576" [ 1631.068213] env[68282]: _type = "Task" [ 1631.068213] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.075555] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470576, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.578975] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470576, 'name': CreateVM_Task, 'duration_secs': 0.304811} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.579230] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1631.579943] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.580173] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.580542] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1631.580824] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e343afea-71fe-48da-b05f-9c6452b90212 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.586494] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1631.586494] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]526df987-6f50-be91-9157-5f89c5b69bfc" [ 1631.586494] env[68282]: _type = "Task" [ 1631.586494] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.594810] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]526df987-6f50-be91-9157-5f89c5b69bfc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.096018] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.096345] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1632.096652] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.386110] env[68282]: DEBUG nova.compute.manager [req-946d0eaf-2e7b-4611-8a62-5695c3681e28 req-eabee00c-9659-45ff-9788-d0c10fe6250b service nova] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Received event network-changed-87981ff2-3e1c-40e8-b5bb-3188f34d032e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1632.386110] env[68282]: DEBUG nova.compute.manager [req-946d0eaf-2e7b-4611-8a62-5695c3681e28 req-eabee00c-9659-45ff-9788-d0c10fe6250b service nova] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Refreshing instance network info cache due to event network-changed-87981ff2-3e1c-40e8-b5bb-3188f34d032e. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1632.386322] env[68282]: DEBUG oslo_concurrency.lockutils [req-946d0eaf-2e7b-4611-8a62-5695c3681e28 req-eabee00c-9659-45ff-9788-d0c10fe6250b service nova] Acquiring lock "refresh_cache-7bc5117e-58d1-4c08-b778-7045b1076b94" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.386454] env[68282]: DEBUG oslo_concurrency.lockutils [req-946d0eaf-2e7b-4611-8a62-5695c3681e28 req-eabee00c-9659-45ff-9788-d0c10fe6250b service nova] Acquired lock "refresh_cache-7bc5117e-58d1-4c08-b778-7045b1076b94" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.386618] env[68282]: DEBUG nova.network.neutron [req-946d0eaf-2e7b-4611-8a62-5695c3681e28 req-eabee00c-9659-45ff-9788-d0c10fe6250b service nova] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Refreshing network info cache for port 87981ff2-3e1c-40e8-b5bb-3188f34d032e {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1632.825805] env[68282]: DEBUG nova.network.neutron [req-946d0eaf-2e7b-4611-8a62-5695c3681e28 req-eabee00c-9659-45ff-9788-d0c10fe6250b service nova] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Updated VIF entry in instance network info cache for port 87981ff2-3e1c-40e8-b5bb-3188f34d032e. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1632.826193] env[68282]: DEBUG nova.network.neutron [req-946d0eaf-2e7b-4611-8a62-5695c3681e28 req-eabee00c-9659-45ff-9788-d0c10fe6250b service nova] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Updating instance_info_cache with network_info: [{"id": "87981ff2-3e1c-40e8-b5bb-3188f34d032e", "address": "fa:16:3e:52:13:69", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87981ff2-3e", "ovs_interfaceid": "87981ff2-3e1c-40e8-b5bb-3188f34d032e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.835068] env[68282]: DEBUG oslo_concurrency.lockutils [req-946d0eaf-2e7b-4611-8a62-5695c3681e28 req-eabee00c-9659-45ff-9788-d0c10fe6250b service nova] Releasing lock "refresh_cache-7bc5117e-58d1-4c08-b778-7045b1076b94" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.656427] env[68282]: WARNING oslo_vmware.rw_handles [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1672.656427] env[68282]: ERROR oslo_vmware.rw_handles [ 1672.657185] env[68282]: DEBUG nova.virt.vmwareapi.images [None 
req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1672.658997] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1672.659255] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Copying Virtual Disk [datastore2] vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/1471d26e-8d19-4062-869e-0399e07c8ccb/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1672.659553] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1895e70-5df2-4466-902a-f5eb6e68045d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.667826] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1672.667826] env[68282]: value = "task-3470577" [ 1672.667826] env[68282]: _type = "Task" [ 1672.667826] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.676670] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470577, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.178658] env[68282]: DEBUG oslo_vmware.exceptions [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1673.179033] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.179607] env[68282]: ERROR nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1673.179607] env[68282]: Faults: ['InvalidArgument'] [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Traceback (most recent call last): [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] yield resources [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] self.driver.spawn(context, instance, image_meta, [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] self._fetch_image_if_missing(context, vi) [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] image_cache(vi, tmp_image_ds_loc) [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] vm_util.copy_virtual_disk( [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] session._wait_for_task(vmdk_copy_task) [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] return self.wait_for_task(task_ref) [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] return evt.wait() [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] result = hub.switch() [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] return self.greenlet.switch() [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] self.f(*self.args, **self.kw) [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] raise exceptions.translate_fault(task_info.error) [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Faults: ['InvalidArgument'] [ 1673.179607] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] [ 1673.180702] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Terminating instance [ 1673.182080] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.182080] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.182227] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef764ecd-72ce-4657-916c-cc87ce577d4f {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.184826] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1673.185038] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1673.185746] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5fbb46-6a9b-42ce-ab24-52d9aed737c5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.192845] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1673.193094] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b33a41ab-b00c-45c9-92ca-47c3f65efaaa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.195205] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.195377] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1673.196312] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf527e4e-d4f5-4e0c-ae9e-b39091e5e2db {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.201194] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1673.201194] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52c265b4-dce1-dcd7-165b-e49b6e9e4430" [ 1673.201194] env[68282]: _type = "Task" [ 1673.201194] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.209146] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52c265b4-dce1-dcd7-165b-e49b6e9e4430, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.258177] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1673.258402] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1673.258583] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleting the datastore file [datastore2] d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1673.258839] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e34b4db5-a0aa-446e-8c72-6f4731acb841 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.264694] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for the task: (returnval){ [ 1673.264694] env[68282]: value = "task-3470579" [ 1673.264694] env[68282]: _type = "Task" [ 1673.264694] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.272133] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470579, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.712637] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1673.713030] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating directory with path [datastore2] vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1673.713136] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3b3a936-cf7b-4973-9092-ed86c1fac69e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.724577] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created directory with path [datastore2] vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.724767] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Fetch image to [datastore2] vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1673.724940] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1673.725650] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7e2368-af3c-4175-a86a-4b6906b6eb8e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.733537] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-137a249b-96fb-4dbe-aa2c-e62581fd484d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.742826] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee3c3b1-1c7d-4561-9d8b-11552071aed4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.775908] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790700eb-0444-4c40-8684-b1553f052406 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.785620] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6f010b3c-93c7-47d7-bc38-195db8f70cc3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.787276] env[68282]: DEBUG oslo_vmware.api [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Task: {'id': task-3470579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077585} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.787510] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.787712] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1673.787888] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1673.788074] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Took 0.60 seconds to destroy the instance on the hypervisor. 
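[editorial note] The CopyVirtualDisk_Task failure recorded above surfaces through oslo.vmware's task-polling loop: the traceback shows wait_for_task() blocking until _poll_task either sees success or raises the fault translated from task_info.error (api.py line 448). The following is only a minimal, self-contained sketch of that polling pattern under assumed names (TaskFault, poll_task_info); it is not the oslo.vmware implementation.

# Sketch of the poll-until-done pattern seen in the "progress is 0%" /
# "completed successfully" / translate_fault lines above. Assumed names only.
import time

class TaskFault(Exception):
    """Stand-in for the translated VIM fault (e.g. InvalidArgument)."""

def wait_for_task(poll_task_info, interval=0.5):
    # poll_task_info() is a hypothetical callable returning
    # {'state': 'running'|'success'|'error', ...}.
    while True:
        info = poll_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # analogous to raising exceptions.translate_fault(task_info.error)
            raise TaskFault(info['error'])
        time.sleep(interval)

# Example: a fake task that fails the way the CopyVirtualDisk_Task above did.
states = iter([
    {'state': 'running', 'progress': 0},
    {'state': 'error', 'error': "A specified parameter was not correct: fileType"},
])
try:
    wait_for_task(lambda: next(states), interval=0.01)
except TaskFault as exc:
    print("task failed:", exc)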
[ 1673.790161] env[68282]: DEBUG nova.compute.claims [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1673.790398] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.790632] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.807859] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1673.896635] env[68282]: DEBUG oslo_vmware.rw_handles [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1673.958514] env[68282]: DEBUG oslo_vmware.rw_handles [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1673.958724] env[68282]: DEBUG oslo_vmware.rw_handles [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1674.108592] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8096b063-6499-4cf6-8350-5b6a39a366f5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.116940] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a8c7ee-6265-49d0-84e6-59d7ad8185aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.146793] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ba0c0f-2e37-400f-8535-4bda7fcf9204 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.153724] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21adb50-d247-4688-ac5d-82e0aa183076 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.166630] env[68282]: DEBUG nova.compute.provider_tree [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1674.174874] env[68282]: DEBUG nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1674.209393] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.419s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.209977] env[68282]: ERROR nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1674.209977] env[68282]: Faults: ['InvalidArgument'] [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Traceback (most recent call last): [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] self.driver.spawn(context, instance, image_meta, [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] self._fetch_image_if_missing(context, vi) [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] image_cache(vi, tmp_image_ds_loc) [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] vm_util.copy_virtual_disk( [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] session._wait_for_task(vmdk_copy_task) [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] return self.wait_for_task(task_ref) [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] return evt.wait() [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] result = hub.switch() [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] return self.greenlet.switch() [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] self.f(*self.args, **self.kw) [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: 
d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] raise exceptions.translate_fault(task_info.error) [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Faults: ['InvalidArgument'] [ 1674.209977] env[68282]: ERROR nova.compute.manager [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] [ 1674.210885] env[68282]: DEBUG nova.compute.utils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1674.212177] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Build of instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 was re-scheduled: A specified parameter was not correct: fileType [ 1674.212177] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1674.212548] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1674.212722] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1674.212895] env[68282]: DEBUG nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1674.213220] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1674.856406] env[68282]: DEBUG nova.network.neutron [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.884046] env[68282]: INFO nova.compute.manager [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Took 0.67 seconds to deallocate network for instance. [ 1675.002553] env[68282]: INFO nova.scheduler.client.report [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Deleted allocations for instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 [ 1675.057056] env[68282]: DEBUG oslo_concurrency.lockutils [None req-849a1c41-ba7e-4902-b9cd-8bc8dc0182e2 tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 553.842s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.058183] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 391.459s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.058389] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] During sync_power_state the instance has a pending task (spawning). Skip. 
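[editorial note] The recurring 'acquired ... waited N s' / '"released" ... held N s' pairs in these entries come from oslo.concurrency's lock wrapper, which times both the wait for a named lock and how long it was held. Below is a rough, self-contained sketch of that bookkeeping using plain threading primitives and assumed names (timed_lock); it is not the lockutils code itself.

# Rough sketch of the waited/held timing reported by the lockutils lines.
import contextlib
import threading
import time

_locks = {}

@contextlib.contextmanager
def timed_lock(name, by):
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

# Example usage mirroring the "compute_resources" lock around a claim.
with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.01)  # stand-in for the claim work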
[ 1675.058568] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.059201] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 357.561s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.059419] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Acquiring lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.059623] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.059789] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.061663] env[68282]: INFO nova.compute.manager [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Terminating instance [ 1675.063305] env[68282]: DEBUG nova.compute.manager [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1675.063497] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1675.063750] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99962d8e-312b-474d-84c3-986a6ea9e814 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.073136] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c0c4bb-9749-4471-89ba-abac1c58ffeb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.084155] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1675.104805] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763 could not be found. [ 1675.105040] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1675.106323] env[68282]: INFO nova.compute.manager [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1675.106323] env[68282]: DEBUG oslo.service.loopingcall [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1675.106323] env[68282]: DEBUG nova.compute.manager [-] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1675.106323] env[68282]: DEBUG nova.network.neutron [-] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1675.136307] env[68282]: DEBUG nova.network.neutron [-] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.143383] env[68282]: INFO nova.compute.manager [-] [instance: d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763] Took 0.04 seconds to deallocate network for instance. [ 1675.159450] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.159698] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.161097] env[68282]: INFO nova.compute.claims [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1675.266434] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f7c03545-61d5-4afd-9374-deb2ad011bbb tempest-ListServersNegativeTestJSON-1734619492 tempest-ListServersNegativeTestJSON-1734619492-project-member] Lock "d53e6bc9-f8d3-41dd-adc1-e0d7d5c22763" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.207s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.368499] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1802290c-c1ac-4b14-8a1d-be3bd08711fd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.376208] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f544912d-f86e-4683-9518-39c4bcff45c9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.406723] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453ad800-d2be-4eea-a1c4-29209584337d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.413796] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6b7583b6-dd85-4fb3-b566-06723fe69950 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.426502] env[68282]: DEBUG nova.compute.provider_tree [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.434603] env[68282]: DEBUG nova.scheduler.client.report [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1675.451921] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.452399] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1675.529293] env[68282]: DEBUG nova.compute.utils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1675.530572] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1675.530819] env[68282]: DEBUG nova.network.neutron [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1675.539734] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Start building block device mappings for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1675.604489] env[68282]: DEBUG nova.policy [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a603a37bebe4a1c9b8f5d3b1cf5f34f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0695dbb014ba4c359215dbb84bb1d314', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1675.622391] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1675.667853] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1675.668133] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1675.668298] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.668513] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1675.668622] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.668772] env[68282]: DEBUG nova.virt.hardware [None 
req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1675.668982] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1675.669179] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1675.669355] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1675.669522] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1675.669715] env[68282]: DEBUG nova.virt.hardware [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1675.670579] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6abcae-b5fa-41cc-8294-c759ff12e76d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.678635] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543f4d84-0559-4e93-b54e-6e41f4ae8db5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.017184] env[68282]: DEBUG nova.network.neutron [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Successfully created port: cc47955f-7492-4f80-8a87-9fb7d9f994ff {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1676.908363] env[68282]: DEBUG nova.compute.manager [req-06f4679b-7143-458f-a101-a82dcacccd1d req-8ebc81d1-1ea5-444f-9802-4379186c7726 service nova] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Received event network-vif-plugged-cc47955f-7492-4f80-8a87-9fb7d9f994ff {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1676.908942] env[68282]: DEBUG oslo_concurrency.lockutils [req-06f4679b-7143-458f-a101-a82dcacccd1d req-8ebc81d1-1ea5-444f-9802-4379186c7726 service nova] Acquiring lock 
"5c2d229f-e14c-43b8-80d1-9232557b7520-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.908942] env[68282]: DEBUG oslo_concurrency.lockutils [req-06f4679b-7143-458f-a101-a82dcacccd1d req-8ebc81d1-1ea5-444f-9802-4379186c7726 service nova] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.908942] env[68282]: DEBUG oslo_concurrency.lockutils [req-06f4679b-7143-458f-a101-a82dcacccd1d req-8ebc81d1-1ea5-444f-9802-4379186c7726 service nova] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.909157] env[68282]: DEBUG nova.compute.manager [req-06f4679b-7143-458f-a101-a82dcacccd1d req-8ebc81d1-1ea5-444f-9802-4379186c7726 service nova] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] No waiting events found dispatching network-vif-plugged-cc47955f-7492-4f80-8a87-9fb7d9f994ff {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1676.909236] env[68282]: WARNING nova.compute.manager [req-06f4679b-7143-458f-a101-a82dcacccd1d req-8ebc81d1-1ea5-444f-9802-4379186c7726 service nova] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Received unexpected event network-vif-plugged-cc47955f-7492-4f80-8a87-9fb7d9f994ff for instance with vm_state building and task_state spawning. [ 1676.993427] env[68282]: DEBUG nova.network.neutron [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Successfully updated port: cc47955f-7492-4f80-8a87-9fb7d9f994ff {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1677.023488] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "refresh_cache-5c2d229f-e14c-43b8-80d1-9232557b7520" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.023488] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired lock "refresh_cache-5c2d229f-e14c-43b8-80d1-9232557b7520" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.023488] env[68282]: DEBUG nova.network.neutron [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1677.072681] env[68282]: DEBUG nova.network.neutron [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Instance cache missing 
network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1677.257289] env[68282]: DEBUG nova.network.neutron [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Updating instance_info_cache with network_info: [{"id": "cc47955f-7492-4f80-8a87-9fb7d9f994ff", "address": "fa:16:3e:27:99:88", "network": {"id": "a4a3552a-50b6-4f21-8281-1a40d7f4da04", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-640716636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0695dbb014ba4c359215dbb84bb1d314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc47955f-74", "ovs_interfaceid": "cc47955f-7492-4f80-8a87-9fb7d9f994ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.278546] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Releasing lock "refresh_cache-5c2d229f-e14c-43b8-80d1-9232557b7520" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.278839] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Instance network_info: |[{"id": "cc47955f-7492-4f80-8a87-9fb7d9f994ff", "address": "fa:16:3e:27:99:88", "network": {"id": "a4a3552a-50b6-4f21-8281-1a40d7f4da04", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-640716636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0695dbb014ba4c359215dbb84bb1d314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc47955f-74", "ovs_interfaceid": "cc47955f-7492-4f80-8a87-9fb7d9f994ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1677.279271] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:99:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc47955f-7492-4f80-8a87-9fb7d9f994ff', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1677.287474] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Creating folder: Project (0695dbb014ba4c359215dbb84bb1d314). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1677.288056] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-235a7166-221b-4e17-ac4b-d960995c3554 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.297704] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Created folder: Project (0695dbb014ba4c359215dbb84bb1d314) in parent group-v693573. [ 1677.297897] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Creating folder: Instances. Parent ref: group-v693661. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1677.298137] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27f9231d-550f-4b49-b6bf-eedc74e9cbdc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.306248] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Created folder: Instances in parent group-v693661. [ 1677.306470] env[68282]: DEBUG oslo.service.loopingcall [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1677.306676] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1677.306869] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-967f0c0b-56fb-4d48-995b-981353002231 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.324780] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1677.324780] env[68282]: value = "task-3470582" [ 1677.324780] env[68282]: _type = "Task" [ 1677.324780] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.331905] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470582, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.834238] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470582, 'name': CreateVM_Task, 'duration_secs': 0.273128} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.834397] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1677.835125] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.835300] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.835622] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1677.835864] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99c6c177-847c-4058-8551-35965589c3ae {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.840158] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for the task: (returnval){ [ 1677.840158] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5239f549-b18f-caed-417a-d0d9b2cc021c" [ 1677.840158] env[68282]: _type = "Task" [ 1677.840158] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.847285] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5239f549-b18f-caed-417a-d0d9b2cc021c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.350870] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.351207] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1678.351365] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.932492] env[68282]: DEBUG nova.compute.manager [req-c35e8c6a-1081-402d-a506-daefd8670ed9 req-57b125e0-82c4-4dfb-891f-1d5147cb6311 service nova] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Received event network-changed-cc47955f-7492-4f80-8a87-9fb7d9f994ff {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1678.932716] env[68282]: DEBUG nova.compute.manager [req-c35e8c6a-1081-402d-a506-daefd8670ed9 req-57b125e0-82c4-4dfb-891f-1d5147cb6311 service nova] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Refreshing instance network info cache due to event network-changed-cc47955f-7492-4f80-8a87-9fb7d9f994ff. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1678.932931] env[68282]: DEBUG oslo_concurrency.lockutils [req-c35e8c6a-1081-402d-a506-daefd8670ed9 req-57b125e0-82c4-4dfb-891f-1d5147cb6311 service nova] Acquiring lock "refresh_cache-5c2d229f-e14c-43b8-80d1-9232557b7520" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.933239] env[68282]: DEBUG oslo_concurrency.lockutils [req-c35e8c6a-1081-402d-a506-daefd8670ed9 req-57b125e0-82c4-4dfb-891f-1d5147cb6311 service nova] Acquired lock "refresh_cache-5c2d229f-e14c-43b8-80d1-9232557b7520" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.933453] env[68282]: DEBUG nova.network.neutron [req-c35e8c6a-1081-402d-a506-daefd8670ed9 req-57b125e0-82c4-4dfb-891f-1d5147cb6311 service nova] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Refreshing network info cache for port cc47955f-7492-4f80-8a87-9fb7d9f994ff {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1679.530265] env[68282]: DEBUG nova.network.neutron [req-c35e8c6a-1081-402d-a506-daefd8670ed9 req-57b125e0-82c4-4dfb-891f-1d5147cb6311 service nova] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Updated VIF entry in instance network info cache for port cc47955f-7492-4f80-8a87-9fb7d9f994ff. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1679.530660] env[68282]: DEBUG nova.network.neutron [req-c35e8c6a-1081-402d-a506-daefd8670ed9 req-57b125e0-82c4-4dfb-891f-1d5147cb6311 service nova] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Updating instance_info_cache with network_info: [{"id": "cc47955f-7492-4f80-8a87-9fb7d9f994ff", "address": "fa:16:3e:27:99:88", "network": {"id": "a4a3552a-50b6-4f21-8281-1a40d7f4da04", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-640716636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0695dbb014ba4c359215dbb84bb1d314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc47955f-74", "ovs_interfaceid": "cc47955f-7492-4f80-8a87-9fb7d9f994ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1679.541768] env[68282]: DEBUG oslo_concurrency.lockutils [req-c35e8c6a-1081-402d-a506-daefd8670ed9 req-57b125e0-82c4-4dfb-891f-1d5147cb6311 service nova] Releasing lock "refresh_cache-5c2d229f-e14c-43b8-80d1-9232557b7520" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.086686] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1680.101537] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.101783] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.101988] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.102171] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1680.103397] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ba3682-0c8b-45d5-9b0a-be269e6d795a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.111901] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd63569-d052-429b-8c11-375aa64be3c6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.125264] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0644da2-db49-4f12-be82-39eefc6fbadb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.131316] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9062ed-1231-44b5-90b6-220d63d63e37 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.161362] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180934MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1680.161473] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.161650] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.269811] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6680219f-25bf-453c-ba97-4aeb3295f62b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.269977] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.270149] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.270306] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.270439] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.270562] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.270679] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.270797] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.270912] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.271036] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1680.294638] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1680.305355] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8a88813d-7a06-45e0-ae16-b98807cb89c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1680.315887] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1680.316128] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1680.316279] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1680.464855] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b766b69d-75fc-47ef-89ba-d477fdb46262 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.472828] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8bb42e-7d3b-4e12-8235-f3c4082ea2aa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.502633] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa51eca-f79e-4499-a393-6b29a63bcd5c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.509873] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c7a966-1914-4345-b611-429ced589cec {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.523515] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1680.532250] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1680.550113] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1680.550296] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.389s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.551911] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1685.552215] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1685.552253] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1685.571960] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.572121] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.572257] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.572384] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.572508] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.572631] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.572750] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.572870] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.572988] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.573123] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1685.573248] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1685.573721] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1685.573864] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1686.104265] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1687.086690] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1688.087383] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.087622] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.087989] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1691.087715] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.170782] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "7bc5117e-58d1-4c08-b778-7045b1076b94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.019992] env[68282]: WARNING oslo_vmware.rw_handles [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles 
raise RemoteDisconnected("Remote end closed connection without" [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1721.019992] env[68282]: ERROR oslo_vmware.rw_handles [ 1721.020707] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1721.022773] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1721.023039] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Copying Virtual Disk [datastore2] vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/b9f88ce8-af0a-41e5-adeb-7d6926130b13/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1721.023354] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d2f7041-c832-4388-9cdb-3d86793e770b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.031586] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1721.031586] env[68282]: value = "task-3470583" [ 1721.031586] env[68282]: _type = "Task" [ 1721.031586] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.039417] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470583, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.542522] env[68282]: DEBUG oslo_vmware.exceptions [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1721.542814] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.543363] env[68282]: ERROR nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1721.543363] env[68282]: Faults: ['InvalidArgument'] [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Traceback (most recent call last): [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] yield resources [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] self.driver.spawn(context, instance, image_meta, [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] self._fetch_image_if_missing(context, vi) [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] image_cache(vi, tmp_image_ds_loc) [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] vm_util.copy_virtual_disk( [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] session._wait_for_task(vmdk_copy_task) [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] return self.wait_for_task(task_ref) [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] return evt.wait() [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] result = hub.switch() [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] return self.greenlet.switch() [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] self.f(*self.args, **self.kw) [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] raise exceptions.translate_fault(task_info.error) [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Faults: ['InvalidArgument'] [ 1721.543363] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] [ 1721.544200] env[68282]: INFO nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Terminating instance [ 1721.545226] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.545430] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1721.545663] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-318adf48-753d-42b8-9928-72fe9f8acbe1 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.547789] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1721.547999] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1721.548707] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1299921e-c2c2-443c-9e1b-9430b718d1d8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.555812] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1721.556786] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05627a3b-1468-47e3-ab97-74b194e13bc4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.558208] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1721.558384] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1721.559041] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a007e4bd-f628-4428-b32a-88c9eadc076e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.563842] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Waiting for the task: (returnval){ [ 1721.563842] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52f5bf95-b4fd-d9b7-ad72-722601bda8e0" [ 1721.563842] env[68282]: _type = "Task" [ 1721.563842] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.572816] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52f5bf95-b4fd-d9b7-ad72-722601bda8e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.631186] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1721.633527] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1721.633527] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleting the datastore file [datastore2] 6680219f-25bf-453c-ba97-4aeb3295f62b {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1721.633527] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db0d4ecf-4c64-4919-baa4-0d268e221be4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.638754] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1721.638754] env[68282]: value = "task-3470585" [ 1721.638754] env[68282]: _type = "Task" [ 1721.638754] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.646820] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470585, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.076628] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1722.077045] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Creating directory with path [datastore2] vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1722.077134] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb1317b1-7f00-47ed-add5-13a557c95ec4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.089449] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Created directory with path [datastore2] vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1722.089628] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Fetch image to [datastore2] vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1722.089798] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1722.090503] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b590343-64a5-44fd-8059-3772a7be64bb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.096495] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9419191b-f76f-4cc8-bbdf-038cd2d5665f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.105191] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f184bf6-e299-4fca-ab76-2dff4b9dae14 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.135261] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da66d7a0-fc53-4204-bf10-49c6cbec984b 
{{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.142822] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-938c14f3-e8dd-44e5-892a-b79907ecbe4b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.148625] env[68282]: DEBUG oslo_vmware.api [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07424} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.148854] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1722.149056] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1722.149324] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1722.149507] env[68282]: INFO nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1722.151660] env[68282]: DEBUG nova.compute.claims [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1722.151912] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.152158] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.162914] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1722.212946] env[68282]: DEBUG oslo_vmware.rw_handles [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1722.278149] env[68282]: DEBUG oslo_vmware.rw_handles [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1722.278360] env[68282]: DEBUG oslo_vmware.rw_handles [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1722.394527] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730193ad-8bc9-4337-ade8-0155203ec41e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.401900] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ce5363-b3ba-4ef8-851e-67c1f3c9a018 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.431593] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733ed320-28a4-4d5e-b01a-952b38764996 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.438277] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f66b32-6cd9-4b71-8064-14e13e9898c6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.450896] env[68282]: DEBUG nova.compute.provider_tree [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.459857] env[68282]: DEBUG nova.scheduler.client.report [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1722.472721] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.320s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.473255] env[68282]: ERROR nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.473255] env[68282]: Faults: ['InvalidArgument'] [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Traceback (most recent call last): [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] 
self.driver.spawn(context, instance, image_meta, [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] self._fetch_image_if_missing(context, vi) [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] image_cache(vi, tmp_image_ds_loc) [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] vm_util.copy_virtual_disk( [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] session._wait_for_task(vmdk_copy_task) [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] return self.wait_for_task(task_ref) [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] return evt.wait() [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] result = hub.switch() [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] return self.greenlet.switch() [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] self.f(*self.args, **self.kw) [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] raise exceptions.translate_fault(task_info.error) [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Faults: ['InvalidArgument'] [ 1722.473255] env[68282]: ERROR nova.compute.manager [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] [ 1722.474111] env[68282]: DEBUG nova.compute.utils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1722.475307] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Build of instance 6680219f-25bf-453c-ba97-4aeb3295f62b was re-scheduled: A specified parameter was not correct: fileType [ 1722.475307] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1722.475684] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1722.475858] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1722.476045] env[68282]: DEBUG nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1722.476217] env[68282]: DEBUG nova.network.neutron [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1722.864595] env[68282]: DEBUG nova.network.neutron [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.877532] env[68282]: INFO nova.compute.manager [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Took 0.40 seconds to deallocate network for instance. 
[ 1723.031838] env[68282]: INFO nova.scheduler.client.report [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleted allocations for instance 6680219f-25bf-453c-ba97-4aeb3295f62b [ 1723.072858] env[68282]: DEBUG oslo_concurrency.lockutils [None req-44559fa3-281c-4e83-9bcc-27beb061c4ee tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "6680219f-25bf-453c-ba97-4aeb3295f62b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 595.252s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.074292] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "6680219f-25bf-453c-ba97-4aeb3295f62b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 399.579s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.074589] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "6680219f-25bf-453c-ba97-4aeb3295f62b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.074733] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "6680219f-25bf-453c-ba97-4aeb3295f62b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.074904] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "6680219f-25bf-453c-ba97-4aeb3295f62b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.077418] env[68282]: INFO nova.compute.manager [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Terminating instance [ 1723.079114] env[68282]: DEBUG nova.compute.manager [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1723.079322] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1723.079805] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44dc4e9a-97d3-4b76-8313-10b4dedea315 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.089056] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae2e063-2632-4453-86d2-fd42a79a8ff7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.100113] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1723.121677] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6680219f-25bf-453c-ba97-4aeb3295f62b could not be found. [ 1723.121879] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1723.122067] env[68282]: INFO nova.compute.manager [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1723.122310] env[68282]: DEBUG oslo.service.loopingcall [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1723.122536] env[68282]: DEBUG nova.compute.manager [-] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1723.122629] env[68282]: DEBUG nova.network.neutron [-] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1723.151314] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.151565] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.152999] env[68282]: INFO nova.compute.claims [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1723.155781] env[68282]: DEBUG nova.network.neutron [-] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.168877] env[68282]: INFO nova.compute.manager [-] [instance: 6680219f-25bf-453c-ba97-4aeb3295f62b] Took 0.05 seconds to deallocate network for instance. 
[ 1723.281653] env[68282]: DEBUG oslo_concurrency.lockutils [None req-bd34ad5c-7604-4054-a57e-3e4ccb362f43 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "6680219f-25bf-453c-ba97-4aeb3295f62b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.207s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.361293] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0bd5fc-daa8-4304-a725-8b261ab2e083 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.369211] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d6d2b7-9704-47a4-aeb7-d6652d7ac6c1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.397944] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f8601f-f6e8-4856-b4f5-c9156a01a93b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.404648] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c9b58a-8652-46ab-86df-3330344bdcae {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.418190] env[68282]: DEBUG nova.compute.provider_tree [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.427542] env[68282]: DEBUG nova.scheduler.client.report [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1723.440384] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.289s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.440834] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1723.480945] env[68282]: DEBUG nova.compute.utils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1723.482206] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1723.482381] env[68282]: DEBUG nova.network.neutron [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1723.491342] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1723.547867] env[68282]: DEBUG nova.policy [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '586f6880a99449eeab1379280df867a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12975c11434b4530b1f38c1eceaa4e68', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1723.552685] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1723.579322] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1723.579775] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1723.579775] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1723.579932] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1723.580095] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1723.580249] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1723.580456] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1723.580617] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1723.580787] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 
tempest-ServersTestJSON-115846852-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1723.580955] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1723.581153] env[68282]: DEBUG nova.virt.hardware [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1723.582058] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fef3e3-ea13-41fd-bf0b-318a6722c10b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.590044] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d573e6f-cc2f-467a-8e35-8e35dfb11aad {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.853015] env[68282]: DEBUG nova.network.neutron [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Successfully created port: 0328382f-1016-41bb-9d50-d06173abae41 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1724.717401] env[68282]: DEBUG nova.network.neutron [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Successfully updated port: 0328382f-1016-41bb-9d50-d06173abae41 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1724.729376] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "refresh_cache-b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.729536] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired lock "refresh_cache-b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.729687] env[68282]: DEBUG nova.network.neutron [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1724.766286] env[68282]: DEBUG nova.network.neutron [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1725.027631] env[68282]: DEBUG nova.compute.manager [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Received event network-vif-plugged-0328382f-1016-41bb-9d50-d06173abae41 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1725.027866] env[68282]: DEBUG oslo_concurrency.lockutils [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] Acquiring lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.028108] env[68282]: DEBUG oslo_concurrency.lockutils [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.028296] env[68282]: DEBUG oslo_concurrency.lockutils [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.028490] env[68282]: DEBUG nova.compute.manager [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] No waiting events found dispatching network-vif-plugged-0328382f-1016-41bb-9d50-d06173abae41 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1725.028620] env[68282]: WARNING nova.compute.manager [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Received unexpected event network-vif-plugged-0328382f-1016-41bb-9d50-d06173abae41 for instance with vm_state building and task_state spawning. [ 1725.028775] env[68282]: DEBUG nova.compute.manager [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Received event network-changed-0328382f-1016-41bb-9d50-d06173abae41 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1725.028925] env[68282]: DEBUG nova.compute.manager [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Refreshing instance network info cache due to event network-changed-0328382f-1016-41bb-9d50-d06173abae41. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1725.029685] env[68282]: DEBUG oslo_concurrency.lockutils [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] Acquiring lock "refresh_cache-b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.156056] env[68282]: DEBUG nova.network.neutron [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Updating instance_info_cache with network_info: [{"id": "0328382f-1016-41bb-9d50-d06173abae41", "address": "fa:16:3e:a4:c1:e8", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0328382f-10", "ovs_interfaceid": "0328382f-1016-41bb-9d50-d06173abae41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.170733] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Releasing lock "refresh_cache-b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.171050] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Instance network_info: |[{"id": "0328382f-1016-41bb-9d50-d06173abae41", "address": "fa:16:3e:a4:c1:e8", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0328382f-10", 
"ovs_interfaceid": "0328382f-1016-41bb-9d50-d06173abae41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1725.171411] env[68282]: DEBUG oslo_concurrency.lockutils [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] Acquired lock "refresh_cache-b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.171541] env[68282]: DEBUG nova.network.neutron [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Refreshing network info cache for port 0328382f-1016-41bb-9d50-d06173abae41 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1725.172629] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:c1:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a485857d-7086-4dcf-9d65-d0dcd177fcb0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0328382f-1016-41bb-9d50-d06173abae41', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1725.179994] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Creating folder: Project (12975c11434b4530b1f38c1eceaa4e68). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1725.182827] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d91cea7-fcb3-42b7-b590-5313f1c1e9f1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.194423] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Created folder: Project (12975c11434b4530b1f38c1eceaa4e68) in parent group-v693573. [ 1725.194604] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Creating folder: Instances. Parent ref: group-v693664. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1725.194829] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af816f1f-8492-4780-920e-22b45570daf8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.203248] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Created folder: Instances in parent group-v693664. 
[ 1725.203496] env[68282]: DEBUG oslo.service.loopingcall [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1725.203746] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1725.203841] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2059ab12-0c6c-48f9-9073-40639228f9ca {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.223646] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1725.223646] env[68282]: value = "task-3470588" [ 1725.223646] env[68282]: _type = "Task" [ 1725.223646] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.233514] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470588, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.575730] env[68282]: DEBUG nova.network.neutron [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Updated VIF entry in instance network info cache for port 0328382f-1016-41bb-9d50-d06173abae41. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1725.576239] env[68282]: DEBUG nova.network.neutron [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Updating instance_info_cache with network_info: [{"id": "0328382f-1016-41bb-9d50-d06173abae41", "address": "fa:16:3e:a4:c1:e8", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0328382f-10", "ovs_interfaceid": "0328382f-1016-41bb-9d50-d06173abae41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.586059] env[68282]: DEBUG oslo_concurrency.lockutils [req-e2ced05c-404f-4071-a9f7-5910d9ca3aa5 req-0560286f-2528-4048-8f95-6ee6c1d913d9 service nova] Releasing lock "refresh_cache-b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" {{(pid=68282) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.734961] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470588, 'name': CreateVM_Task, 'duration_secs': 0.302368} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.735325] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1725.735820] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.735988] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.736338] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1725.736572] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-837d3491-e0b3-42b3-9ceb-789e4e1a78e0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.740663] env[68282]: DEBUG oslo_vmware.api [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 1725.740663] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52329ba5-20af-3ce1-b015-674b98072180" [ 1725.740663] env[68282]: _type = "Task" [ 1725.740663] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.749031] env[68282]: DEBUG oslo_vmware.api [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52329ba5-20af-3ce1-b015-674b98072180, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.184864] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "423ea779-232c-4662-acbd-9d2edec5867b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.184864] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "423ea779-232c-4662-acbd-9d2edec5867b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.252593] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.252593] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1726.252593] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.028571] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "5c2d229f-e14c-43b8-80d1-9232557b7520" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.087386] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1742.100169] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.100399] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.100647] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.100864] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1742.102395] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cc3484-5362-42f6-a525-8db9efb408ff {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.110939] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c21225-4c53-4dfb-b588-1fbb64197958 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.124387] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5106f784-000d-436c-8b6b-f041121dce2f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.130349] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852e7c7c-293c-48bb-bdcd-0863be49a2fe {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.160359] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180941MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1742.160484] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.160670] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.231346] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 4340e67d-0b82-4f16-8c49-88886a57523f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.231512] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.231642] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.231765] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.231886] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.232011] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.232140] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.232258] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.232375] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.232489] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1742.243346] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8a88813d-7a06-45e0-ae16-b98807cb89c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1742.252692] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1742.261387] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 423ea779-232c-4662-acbd-9d2edec5867b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1742.261597] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1742.261740] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1742.404100] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49362274-7af8-4e19-bef4-9f8d66bc1cdc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.413368] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f682f7f2-fa7b-4ad4-92b1-4feb0ef6c6ca {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.454849] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a5bb66-91dc-481b-a039-dd8f4deb5b25 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.462177] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b634ac32-2a93-452d-a3d8-ff18eb599fd8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.475816] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.484181] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1742.499095] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1742.499292] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.339s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.500089] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1745.500089] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1745.500089] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1745.519841] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.519994] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.520131] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.520261] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.520384] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.520504] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.520623] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.520740] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.520864] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.520991] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1745.521125] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1745.521627] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1745.521773] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1748.088248] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1748.088620] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1748.109506] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.087546] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.257567] env[68282]: DEBUG oslo_concurrency.lockutils [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1751.087710] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1751.088044] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1753.087910] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] 
Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.562244] env[68282]: WARNING oslo_vmware.rw_handles [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1771.562244] env[68282]: ERROR oslo_vmware.rw_handles [ 1771.562918] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1771.564694] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1771.564939] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Copying Virtual Disk [datastore2] vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/c0602516-c1ff-4612-b4a1-593551312b9d/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1771.565241] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83250969-3790-4ad9-a456-c7f09b0d5546 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.572835] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d 
tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Waiting for the task: (returnval){ [ 1771.572835] env[68282]: value = "task-3470589" [ 1771.572835] env[68282]: _type = "Task" [ 1771.572835] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.580214] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Task: {'id': task-3470589, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.083057] env[68282]: DEBUG oslo_vmware.exceptions [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1772.083366] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.083919] env[68282]: ERROR nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1772.083919] env[68282]: Faults: ['InvalidArgument'] [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Traceback (most recent call last): [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] yield resources [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] self.driver.spawn(context, instance, image_meta, [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] self._fetch_image_if_missing(context, vi) [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 
639, in _fetch_image_if_missing [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] image_cache(vi, tmp_image_ds_loc) [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] vm_util.copy_virtual_disk( [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] session._wait_for_task(vmdk_copy_task) [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] return self.wait_for_task(task_ref) [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] return evt.wait() [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] result = hub.switch() [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] return self.greenlet.switch() [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] self.f(*self.args, **self.kw) [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] raise exceptions.translate_fault(task_info.error) [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Faults: ['InvalidArgument'] [ 1772.083919] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] [ 1772.085146] env[68282]: INFO nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Terminating instance [ 1772.085809] 
env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.086036] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1772.086278] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10dbf22d-9493-424b-92cb-8bf924b74c68 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.088442] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1772.088646] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1772.089356] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953a93da-bb07-4865-9b26-90e818d85575 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.095955] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1772.096183] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1c03368-13c1-4098-b626-0670c04b5f3a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.098430] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1772.098604] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1772.099559] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb6ed625-3a66-4593-9f47-5e7f67eaa1cb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.103997] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for the task: (returnval){ [ 1772.103997] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52bf162f-1f4b-3a27-f996-f41001f33ff4" [ 1772.103997] env[68282]: _type = "Task" [ 1772.103997] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.112711] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52bf162f-1f4b-3a27-f996-f41001f33ff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.165923] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1772.166131] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1772.166317] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Deleting the datastore file [datastore2] 4340e67d-0b82-4f16-8c49-88886a57523f {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1772.166581] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f475006-17a0-4027-ad71-ebfb99eae7a1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.172319] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Waiting for the task: (returnval){ [ 1772.172319] env[68282]: value = "task-3470591" [ 1772.172319] env[68282]: _type = "Task" [ 1772.172319] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.180120] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Task: {'id': task-3470591, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.614465] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1772.614778] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Creating directory with path [datastore2] vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1772.615032] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52b21ac0-519a-4566-98a0-b9f4186d9b82 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.625332] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Created directory with path [datastore2] vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1772.625536] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Fetch image to [datastore2] vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1772.625707] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1772.626431] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae1570a-24b9-4c55-a328-3e87078380d5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.632620] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe82e1b3-e30f-4bc5-8e6d-457c5e4b11cd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.641429] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51ddec5-2297-466f-b56d-a176122680f1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.671218] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480d3743-8084-4621-9f2d-d2310fdc86ca {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.682887] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f8bb617-68f2-4837-9972-f4a9a850727a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.684586] env[68282]: DEBUG oslo_vmware.api [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Task: {'id': task-3470591, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068434} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.684826] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1772.685016] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1772.685221] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1772.685376] env[68282]: INFO nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Took 0.60 seconds to destroy the instance on the hypervisor. 
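[editor's note] Several records above poll long-running vCenter tasks (CreateVM_Task, SearchDatastore_Task, DeleteDatastoreFile_Task) until they either complete successfully or raise a fault such as the earlier InvalidArgument error. The sketch below illustrates that generic poll-until-done pattern under stated assumptions; it is not oslo.vmware's wait_for_task(), and get_task_info() plus TaskFailed are hypothetical stand-ins for the real session call and exception type.

# Hedged sketch of the "Task ... progress is N%" polling loop seen above.
# get_task_info() and TaskFailed are assumptions, not oslo.vmware APIs.
import time


class TaskFailed(Exception):
    """Raised when the remote task reports an error or cancelled state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds, fails, or is cancelled."""
    while True:
        info = get_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 40}
        state = info['state']
        if state == 'success':
            return info.get('result')
        if state in ('error', 'cancelled'):
            raise TaskFailed(info.get('error', 'task %s failed' % task_ref))
        time.sleep(poll_interval)        # back off before the next poll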
[ 1772.687514] env[68282]: DEBUG nova.compute.claims [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1772.687707] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.687922] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.707256] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1772.830446] env[68282]: DEBUG oslo_vmware.rw_handles [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1772.892985] env[68282]: DEBUG oslo_vmware.rw_handles [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1772.893201] env[68282]: DEBUG oslo_vmware.rw_handles [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1772.946322] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711d861b-e9b6-480f-b097-2ad6451f1f83 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.953904] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdceb8a7-0120-4dd3-98c2-4993092e94a5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.983891] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb96c7b-bd36-4f09-a764-66a6262b5350 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.990667] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9946fbff-7a7f-41ee-86c2-23465fb99f5a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.003481] env[68282]: DEBUG nova.compute.provider_tree [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.011922] env[68282]: DEBUG nova.scheduler.client.report [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1773.024693] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.025265] env[68282]: ERROR nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1773.025265] env[68282]: Faults: ['InvalidArgument'] [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Traceback (most recent call last): [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1773.025265] env[68282]: ERROR 
nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] self.driver.spawn(context, instance, image_meta, [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] self._fetch_image_if_missing(context, vi) [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] image_cache(vi, tmp_image_ds_loc) [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] vm_util.copy_virtual_disk( [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] session._wait_for_task(vmdk_copy_task) [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] return self.wait_for_task(task_ref) [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] return evt.wait() [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] result = hub.switch() [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] return self.greenlet.switch() [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] self.f(*self.args, **self.kw) [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] raise exceptions.translate_fault(task_info.error) [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Faults: ['InvalidArgument'] [ 1773.025265] env[68282]: ERROR nova.compute.manager [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] [ 1773.026223] env[68282]: DEBUG nova.compute.utils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1773.027416] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Build of instance 4340e67d-0b82-4f16-8c49-88886a57523f was re-scheduled: A specified parameter was not correct: fileType [ 1773.027416] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1773.027790] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1773.027961] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1773.028148] env[68282]: DEBUG nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1773.028316] env[68282]: DEBUG nova.network.neutron [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1773.982594] env[68282]: DEBUG nova.network.neutron [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.994967] env[68282]: INFO nova.compute.manager [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Took 0.97 seconds to deallocate network for instance. [ 1774.097426] env[68282]: INFO nova.scheduler.client.report [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Deleted allocations for instance 4340e67d-0b82-4f16-8c49-88886a57523f [ 1774.119186] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e0ad8b-95fd-44bb-b7ca-9db3fa18f10d tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "4340e67d-0b82-4f16-8c49-88886a57523f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 619.655s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.120503] env[68282]: DEBUG oslo_concurrency.lockutils [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "4340e67d-0b82-4f16-8c49-88886a57523f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 422.991s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.120738] env[68282]: DEBUG oslo_concurrency.lockutils [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Acquiring lock "4340e67d-0b82-4f16-8c49-88886a57523f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.120947] env[68282]: DEBUG oslo_concurrency.lockutils [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "4340e67d-0b82-4f16-8c49-88886a57523f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.121139] env[68282]: DEBUG oslo_concurrency.lockutils [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "4340e67d-0b82-4f16-8c49-88886a57523f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.123914] env[68282]: INFO nova.compute.manager [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Terminating instance [ 1774.125700] env[68282]: DEBUG nova.compute.manager [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1774.125933] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1774.126711] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b279e557-4327-4566-84cd-4613e9acc85a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.136841] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c4a67d-80a0-49ff-90d2-af94c0d4112d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.147169] env[68282]: DEBUG nova.compute.manager [None req-02622c8c-6d34-4226-999f-75f1a4cdaafe tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: 8a88813d-7a06-45e0-ae16-b98807cb89c5] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1774.174439] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4340e67d-0b82-4f16-8c49-88886a57523f could not be found. [ 1774.174439] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1774.174439] env[68282]: INFO nova.compute.manager [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1774.174643] env[68282]: DEBUG oslo.service.loopingcall [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.174870] env[68282]: DEBUG nova.compute.manager [None req-02622c8c-6d34-4226-999f-75f1a4cdaafe tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: 8a88813d-7a06-45e0-ae16-b98807cb89c5] Instance disappeared before build. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1774.175783] env[68282]: DEBUG nova.compute.manager [-] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1774.176311] env[68282]: DEBUG nova.network.neutron [-] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1774.193772] env[68282]: DEBUG oslo_concurrency.lockutils [None req-02622c8c-6d34-4226-999f-75f1a4cdaafe tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "8a88813d-7a06-45e0-ae16-b98807cb89c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.161s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.204856] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1774.210860] env[68282]: DEBUG nova.network.neutron [-] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.217773] env[68282]: INFO nova.compute.manager [-] [instance: 4340e67d-0b82-4f16-8c49-88886a57523f] Took 0.04 seconds to deallocate network for instance. 
[ 1774.277726] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.277726] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.279305] env[68282]: INFO nova.compute.claims [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1774.325721] env[68282]: DEBUG oslo_concurrency.lockutils [None req-54bd8dcb-d77e-4c0b-926a-9c41a34780dd tempest-ServerPasswordTestJSON-490707606 tempest-ServerPasswordTestJSON-490707606-project-member] Lock "4340e67d-0b82-4f16-8c49-88886a57523f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.467594] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c722a1-cb5a-414e-bc6c-e854e2508ada {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.475701] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17be83d-8619-4291-8e32-f6b7a493ed7f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.506448] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbd25c7-0697-47ee-9e60-2ce9d1d638b7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.513779] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eabcaef-f74d-4e83-ac1f-0109785b8fd6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.527442] env[68282]: DEBUG nova.compute.provider_tree [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.537706] env[68282]: DEBUG nova.scheduler.client.report [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1774.552304] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.275s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.552777] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1774.586457] env[68282]: DEBUG nova.compute.utils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1774.587875] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1774.588060] env[68282]: DEBUG nova.network.neutron [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1774.596411] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Start building block device mappings for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1774.644361] env[68282]: DEBUG nova.policy [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5d24947a2404b6580f77b4db1312073', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5460c1581c3d4854b84fa07fe0b509ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1774.668140] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1774.693542] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1774.693901] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1774.694144] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1774.694388] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1774.694578] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1774.694758] env[68282]: DEBUG 
nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1774.695020] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1774.695258] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1774.695499] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1774.695718] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1774.695949] env[68282]: DEBUG nova.virt.hardware [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1774.696842] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0c8aaf-20f9-410e-b07b-30db1eb7adb5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.705491] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a61fae-4e80-4fb3-bf88-930c1fbdc307 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.722453] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.722603] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.168919] env[68282]: DEBUG nova.network.neutron [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Successfully created port: 038053a6-7b44-4d3d-9138-2641ab6d646b {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1775.943415] env[68282]: DEBUG nova.network.neutron [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Successfully updated port: 038053a6-7b44-4d3d-9138-2641ab6d646b {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1775.954013] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "refresh_cache-aff4995e-4c8f-4ced-8743-e6cac0484875" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.954984] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquired lock "refresh_cache-aff4995e-4c8f-4ced-8743-e6cac0484875" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.954984] env[68282]: DEBUG nova.network.neutron [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1776.013464] env[68282]: DEBUG nova.network.neutron [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1776.018966] env[68282]: DEBUG nova.compute.manager [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Received event network-vif-plugged-038053a6-7b44-4d3d-9138-2641ab6d646b {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1776.019198] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] Acquiring lock "aff4995e-4c8f-4ced-8743-e6cac0484875-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.019404] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.019572] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.019740] env[68282]: DEBUG nova.compute.manager [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] No waiting events found dispatching network-vif-plugged-038053a6-7b44-4d3d-9138-2641ab6d646b {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1776.019937] env[68282]: WARNING nova.compute.manager [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Received unexpected event network-vif-plugged-038053a6-7b44-4d3d-9138-2641ab6d646b for instance with vm_state building and task_state spawning. [ 1776.020165] env[68282]: DEBUG nova.compute.manager [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Received event network-changed-038053a6-7b44-4d3d-9138-2641ab6d646b {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1776.020353] env[68282]: DEBUG nova.compute.manager [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Refreshing instance network info cache due to event network-changed-038053a6-7b44-4d3d-9138-2641ab6d646b. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1776.020526] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] Acquiring lock "refresh_cache-aff4995e-4c8f-4ced-8743-e6cac0484875" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.350058] env[68282]: DEBUG nova.network.neutron [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Updating instance_info_cache with network_info: [{"id": "038053a6-7b44-4d3d-9138-2641ab6d646b", "address": "fa:16:3e:f1:49:e5", "network": {"id": "d9647962-b056-4b7a-af0f-455340675a1e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2043343242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5460c1581c3d4854b84fa07fe0b509ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038053a6-7b", "ovs_interfaceid": "038053a6-7b44-4d3d-9138-2641ab6d646b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.363404] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Releasing lock "refresh_cache-aff4995e-4c8f-4ced-8743-e6cac0484875" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.363702] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Instance network_info: |[{"id": "038053a6-7b44-4d3d-9138-2641ab6d646b", "address": "fa:16:3e:f1:49:e5", "network": {"id": "d9647962-b056-4b7a-af0f-455340675a1e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2043343242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5460c1581c3d4854b84fa07fe0b509ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 
658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038053a6-7b", "ovs_interfaceid": "038053a6-7b44-4d3d-9138-2641ab6d646b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1776.364008] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] Acquired lock "refresh_cache-aff4995e-4c8f-4ced-8743-e6cac0484875" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.364196] env[68282]: DEBUG nova.network.neutron [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Refreshing network info cache for port 038053a6-7b44-4d3d-9138-2641ab6d646b {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1776.365245] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:49:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '054fcd1e-638e-425a-a1de-78cb188ae026', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '038053a6-7b44-4d3d-9138-2641ab6d646b', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1776.374976] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Creating folder: Project (5460c1581c3d4854b84fa07fe0b509ed). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1776.377832] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1afd0d7-ba3f-44f0-a889-b1e0d35bfee6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.390360] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Created folder: Project (5460c1581c3d4854b84fa07fe0b509ed) in parent group-v693573. [ 1776.390550] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Creating folder: Instances. Parent ref: group-v693667. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1776.390771] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-272c959b-5fd2-4178-bb58-3c7404137a10 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.399296] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Created folder: Instances in parent group-v693667. 
[ 1776.399518] env[68282]: DEBUG oslo.service.loopingcall [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1776.399718] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1776.399910] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e56fdc7-5ecc-4f76-8242-ee4bb65ea7e0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.417528] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1776.417528] env[68282]: value = "task-3470594" [ 1776.417528] env[68282]: _type = "Task" [ 1776.417528] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.427964] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470594, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.736609] env[68282]: DEBUG nova.network.neutron [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Updated VIF entry in instance network info cache for port 038053a6-7b44-4d3d-9138-2641ab6d646b. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1776.736975] env[68282]: DEBUG nova.network.neutron [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Updating instance_info_cache with network_info: [{"id": "038053a6-7b44-4d3d-9138-2641ab6d646b", "address": "fa:16:3e:f1:49:e5", "network": {"id": "d9647962-b056-4b7a-af0f-455340675a1e", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-2043343242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5460c1581c3d4854b84fa07fe0b509ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "054fcd1e-638e-425a-a1de-78cb188ae026", "external-id": "nsx-vlan-transportzone-658", "segmentation_id": 658, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap038053a6-7b", "ovs_interfaceid": "038053a6-7b44-4d3d-9138-2641ab6d646b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.747473] env[68282]: DEBUG oslo_concurrency.lockutils [req-dc106d0b-872c-4774-bc82-607b6e14c36d req-2d9e4d2b-36a1-4846-aa4f-78ed2421f8db service nova] Releasing lock "refresh_cache-aff4995e-4c8f-4ced-8743-e6cac0484875" {{(pid=68282) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.927487] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470594, 'name': CreateVM_Task, 'duration_secs': 0.275373} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.927663] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1776.928328] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.928491] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.928801] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1776.929051] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94765209-99be-4899-999a-f30ce4c6e567 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.933201] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Waiting for the task: (returnval){ [ 1776.933201] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5292a9be-7eb9-cb59-36d3-4bddfd21ba89" [ 1776.933201] env[68282]: _type = "Task" [ 1776.933201] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.940596] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5292a9be-7eb9-cb59-36d3-4bddfd21ba89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.443650] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.443959] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1777.444141] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.637641] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.637966] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.606601] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "aff4995e-4c8f-4ced-8743-e6cac0484875" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.087669] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1804.099456] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.099672] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.099855] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.100025] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1804.101118] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d750d29-49cf-48b8-b927-23cbab86a733 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.110250] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f6967f-1d3d-492d-9404-35fdda18aae8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.125573] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866d3d8b-adf3-4111-8ee7-ae9adc13baa4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.131889] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4bfbbf-07fc-418a-8a94-3c9af3d422a4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.160235] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180907MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1804.160379] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.160565] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.237787] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.237949] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.238093] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.238219] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.238346] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.238463] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.238671] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.238812] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.238931] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.239075] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1804.249580] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 423ea779-232c-4662-acbd-9d2edec5867b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1804.260187] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1804.269465] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1804.269680] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1804.269861] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1804.415045] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56df2ffc-781a-4ba3-b9e4-386531e3c79d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.422211] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c8964e-3884-45e1-9676-769fb446e6d2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.452463] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd7cf61-7b7d-4366-b276-dbb889ba3137 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.459040] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6128af6-c0bb-4def-80e4-ad3634a573de {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.471538] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1804.480065] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1804.494500] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1804.494500] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.334s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.494635] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1807.495058] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1807.495058] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1807.515126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.515288] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.515424] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.515603] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.515776] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.515910] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.516048] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.516175] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.516298] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.516417] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1807.516539] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1807.517028] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1807.517174] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1809.105969] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1810.087253] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.087035] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.087294] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.086969] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1813.088228] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1821.523600] env[68282]: WARNING oslo_vmware.rw_handles [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, 
in close [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1821.523600] env[68282]: ERROR oslo_vmware.rw_handles [ 1821.524579] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1821.526267] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1821.526521] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Copying Virtual Disk [datastore2] vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/be422118-7da6-49b5-94b1-976a4ab08c18/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1821.526811] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd403b79-b970-49d2-b4dc-96d5e83c0634 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.534735] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for the task: (returnval){ [ 1821.534735] env[68282]: value = "task-3470595" [ 1821.534735] env[68282]: _type = "Task" [ 1821.534735] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.542410] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': task-3470595, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.045183] env[68282]: DEBUG oslo_vmware.exceptions [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1822.045615] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.046246] env[68282]: ERROR nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1822.046246] env[68282]: Faults: ['InvalidArgument'] [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Traceback (most recent call last): [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] yield resources [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self.driver.spawn(context, instance, image_meta, [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._fetch_image_if_missing(context, vi) [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] image_cache(vi, tmp_image_ds_loc) [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] vm_util.copy_virtual_disk( [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in 
copy_virtual_disk [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] session._wait_for_task(vmdk_copy_task) [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return self.wait_for_task(task_ref) [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return evt.wait() [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] result = hub.switch() [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return self.greenlet.switch() [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self.f(*self.args, **self.kw) [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] raise exceptions.translate_fault(task_info.error) [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Faults: ['InvalidArgument'] [ 1822.046246] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] [ 1822.047154] env[68282]: INFO nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Terminating instance [ 1822.048427] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.048703] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1822.048972] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f689fb81-3da7-4c99-976a-3616d1ed74ff {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.051109] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.051313] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.051512] env[68282]: DEBUG nova.network.neutron [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1822.058058] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1822.058235] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1822.059465] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7039bce-b02c-4e9a-ac43-226418168691 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.067042] env[68282]: DEBUG oslo_vmware.api [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for the task: (returnval){ [ 1822.067042] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523e5226-8abb-05d3-ef74-621fb3585425" [ 1822.067042] env[68282]: _type = "Task" [ 1822.067042] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.074090] env[68282]: DEBUG oslo_vmware.api [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523e5226-8abb-05d3-ef74-621fb3585425, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.079558] env[68282]: DEBUG nova.network.neutron [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1822.140077] env[68282]: DEBUG nova.network.neutron [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.150132] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.150536] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1822.150726] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1822.151774] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea230fa7-7a06-4820-a544-d5582c8b8308 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.160630] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1822.160845] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75e979f9-6416-42f0-9fb0-cd679740ff16 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.194501] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1822.194714] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 
1822.194897] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Deleting the datastore file [datastore2] 121db530-a9de-4bb9-9d5a-0a88d9587881 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1822.195169] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c870bf6f-ae67-4714-90c7-6a9015ae8e81 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.201191] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for the task: (returnval){ [ 1822.201191] env[68282]: value = "task-3470597" [ 1822.201191] env[68282]: _type = "Task" [ 1822.201191] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.208671] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': task-3470597, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.577451] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1822.577814] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Creating directory with path [datastore2] vmware_temp/945ac5f2-80cf-4159-b455-7198acc0965b/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1822.577976] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cd4c90e-f3f4-4e84-8071-5d3a6235122e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.588924] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Created directory with path [datastore2] vmware_temp/945ac5f2-80cf-4159-b455-7198acc0965b/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1822.589124] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Fetch image to [datastore2] vmware_temp/945ac5f2-80cf-4159-b455-7198acc0965b/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1822.589297] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 
a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/945ac5f2-80cf-4159-b455-7198acc0965b/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1822.590016] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167e1053-ec9f-4796-8655-59941941b258 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.596660] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a576295b-0426-4398-b219-17dd242cea12 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.605323] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c42aa0-91f8-4040-9e0b-54ab0d0d937f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.635330] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc3c878-8e66-4000-9c1d-98904fec0b53 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.641037] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-44c73575-b3da-4e62-95a3-eec99ad13d1b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.659865] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1822.711515] env[68282]: DEBUG oslo_vmware.api [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': task-3470597, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032148} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.711857] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1822.712146] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1822.712381] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1822.712653] env[68282]: INFO nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1822.712970] env[68282]: DEBUG oslo.service.loopingcall [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1822.713198] env[68282]: DEBUG nova.compute.manager [-] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1822.715432] env[68282]: DEBUG nova.compute.claims [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1822.715535] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.715808] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.795198] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.795856] env[68282]: ERROR nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Traceback (most recent call last): [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] result = getattr(controller, method)(*args, **kwargs) [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self._get(image_id) [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] resp, body = self.http_client.get(url, headers=header) [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self.request(url, 'GET', **kwargs) [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self._handle_response(resp) [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise exc.from_response(resp, resp.content) [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] During handling of the above exception, another exception occurred: [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Traceback (most recent call last): [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] yield resources [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self.driver.spawn(context, instance, image_meta, [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1822.795856] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._fetch_image_if_missing(context, vi) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] image_fetch(context, vi, tmp_image_ds_loc) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] images.fetch_image( [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] metadata = IMAGE_API.get(context, image_ref) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return session.show(context, image_id, [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] _reraise_translated_image_exception(image_id) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise new_exc.with_traceback(exc_trace) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] result = getattr(controller, method)(*args, **kwargs) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self._get(image_id) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] resp, body = self.http_client.get(url, headers=header) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self.request(url, 'GET', **kwargs) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self._handle_response(resp) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise exc.from_response(resp, resp.content) [ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1822.796997] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1822.796997] env[68282]: INFO nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Terminating instance [ 1822.797913] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.797913] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1822.798417] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.798571] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.798738] env[68282]: DEBUG nova.network.neutron [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1822.799626] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3b6a0cd-9a17-4d4d-ab22-dc22705431a8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.811055] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1822.811055] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1822.811646] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aa4adef-fc33-4ce9-bd98-237bb4b2709a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.818819] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Waiting for the task: (returnval){ [ 1822.818819] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52cdfb04-1f4f-1b97-717d-56507ffbbbb6" [ 1822.818819] env[68282]: _type = "Task" [ 1822.818819] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.826232] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52cdfb04-1f4f-1b97-717d-56507ffbbbb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.828853] env[68282]: DEBUG nova.network.neutron [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1822.886227] env[68282]: DEBUG nova.network.neutron [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.895452] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.895900] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1822.896097] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1822.897223] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6685a17-6197-4fd6-8c72-57253867eaf4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.907262] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1822.907495] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-259c05e6-d775-447f-9265-ebd5d1881174 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.913233] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a68bcc-ad40-40bd-a7aa-41b9bab4524a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.920077] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dbce19-d4dd-41de-9543-4f8d2227cb43 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.950805] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add309f4-829c-4d35-bd30-331d1b5b9e36 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.953062] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1822.953264] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1822.953438] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Deleting the datastore file [datastore2] a1dd42c9-5466-46fa-911d-c3307ae3cf9c {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1822.953667] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ac64a14-997e-4b2c-8c64-77e52be62e83 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.959797] env[68282]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98eb5d7c-7b61-4194-9c3a-c2b8ffa680f3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.964192] env[68282]: DEBUG oslo_vmware.api [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for the task: (returnval){ [ 1822.964192] env[68282]: value = "task-3470599" [ 1822.964192] env[68282]: _type = "Task" [ 1822.964192] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.974402] env[68282]: DEBUG nova.compute.provider_tree [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.979585] env[68282]: DEBUG oslo_vmware.api [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': task-3470599, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.986860] env[68282]: DEBUG nova.scheduler.client.report [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1822.997628] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.282s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.998175] env[68282]: ERROR nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1822.998175] env[68282]: Faults: ['InvalidArgument'] [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Traceback (most recent call last): [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self.driver.spawn(context, instance, image_meta, [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 
121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._fetch_image_if_missing(context, vi) [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] image_cache(vi, tmp_image_ds_loc) [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] vm_util.copy_virtual_disk( [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] session._wait_for_task(vmdk_copy_task) [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return self.wait_for_task(task_ref) [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return evt.wait() [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] result = hub.switch() [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return self.greenlet.switch() [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self.f(*self.args, **self.kw) [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] raise 
exceptions.translate_fault(task_info.error) [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Faults: ['InvalidArgument'] [ 1822.998175] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] [ 1822.998994] env[68282]: DEBUG nova.compute.utils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1823.000484] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Build of instance 121db530-a9de-4bb9-9d5a-0a88d9587881 was re-scheduled: A specified parameter was not correct: fileType [ 1823.000484] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1823.000868] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1823.001110] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.001262] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.001423] env[68282]: DEBUG nova.network.neutron [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1823.027765] env[68282]: DEBUG nova.network.neutron [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1823.120355] env[68282]: DEBUG nova.network.neutron [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.130151] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.130374] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1823.130558] env[68282]: DEBUG nova.compute.manager [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Skipping network deallocation for instance since networking was not requested. {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1823.217581] env[68282]: INFO nova.scheduler.client.report [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Deleted allocations for instance 121db530-a9de-4bb9-9d5a-0a88d9587881 [ 1823.240013] env[68282]: DEBUG oslo_concurrency.lockutils [None req-302b3a55-d099-4ae1-ae92-ed445346c48d tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "121db530-a9de-4bb9-9d5a-0a88d9587881" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.848s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.241048] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "121db530-a9de-4bb9-9d5a-0a88d9587881" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 235.512s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.241272] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "121db530-a9de-4bb9-9d5a-0a88d9587881-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.241493] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "121db530-a9de-4bb9-9d5a-0a88d9587881-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.241667] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "121db530-a9de-4bb9-9d5a-0a88d9587881-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.243570] env[68282]: INFO nova.compute.manager [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Terminating instance [ 1823.245058] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.245220] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.245407] env[68282]: DEBUG nova.network.neutron [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1823.250299] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1823.270976] env[68282]: DEBUG nova.network.neutron [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1823.314619] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.314876] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.316718] env[68282]: INFO nova.compute.claims [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1823.327956] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1823.328238] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Creating directory with path [datastore2] vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1823.328494] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1fb236e2-a14e-49b9-99af-b376cb1fa47b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.341058] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Created directory with path [datastore2] vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1823.341254] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Fetch image to [datastore2] vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1823.341424] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] 
vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1823.342215] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a991b3d6-83c9-4648-b036-ad8fe346b0eb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.348787] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc90d95f-9677-4b05-91e3-4fb62587f1c2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.360705] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f92346-ea90-43b6-9352-59d081c60d6b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.365183] env[68282]: DEBUG nova.network.neutron [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.399626] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "refresh_cache-121db530-a9de-4bb9-9d5a-0a88d9587881" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.400023] env[68282]: DEBUG nova.compute.manager [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1823.400208] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1823.400931] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30a04aa-77b7-4c8d-95f1-584092ecae0f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.404383] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a907c593-1d90-4d24-98c4-444d3d0dde7d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.409695] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3df2431b-7a1e-4b90-a8b5-54dc52b79cea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.414117] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dc2db6-72c1-4fbe-9ce1-53d26daea04f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.446711] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 121db530-a9de-4bb9-9d5a-0a88d9587881 could not be found. [ 1823.447721] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1823.447721] env[68282]: INFO nova.compute.manager [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1823.447721] env[68282]: DEBUG oslo.service.loopingcall [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.448836] env[68282]: DEBUG nova.compute.manager [-] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1823.452805] env[68282]: DEBUG nova.network.neutron [-] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1823.452805] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1823.476178] env[68282]: DEBUG oslo_vmware.api [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Task: {'id': task-3470599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039103} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.476652] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1823.476816] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1823.477036] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1823.477253] env[68282]: INFO nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Took 0.58 seconds to destroy the instance on the hypervisor. [ 1823.477631] env[68282]: DEBUG oslo.service.loopingcall [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.477820] env[68282]: DEBUG nova.compute.manager [-] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1823.480218] env[68282]: DEBUG nova.compute.claims [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1823.480416] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.513543] env[68282]: DEBUG oslo_vmware.rw_handles [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1823.576045] env[68282]: DEBUG oslo_vmware.rw_handles [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1823.576094] env[68282]: DEBUG oslo_vmware.rw_handles [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1823.614235] env[68282]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68282) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1823.614507] env[68282]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1823.615023] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-45c478fb-591a-4caf-8ac1-24a58979279a'] [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1823.615023] env[68282]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1823.615023] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1823.616402] env[68282]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1823.616402] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1823.616402] env[68282]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1823.616402] env[68282]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1823.616402] env[68282]: ERROR oslo.service.loopingcall [ 1823.616402] env[68282]: ERROR nova.compute.manager [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1823.619450] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb02628a-4dfc-4312-9fb0-87f4366c2827 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.626846] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8acc00-d6d9-492d-a311-5443a113f37d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.656780] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c156f9b-4ff7-4e3b-b730-123f563eb617 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.660351] env[68282]: ERROR nova.compute.manager [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Traceback (most recent call last): [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] ret = obj(*args, **kwargs) [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] exception_handler_v20(status_code, error_body) [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] raise client_exc(message=error_message, [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Neutron server returns request_ids: ['req-45c478fb-591a-4caf-8ac1-24a58979279a'] [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] During handling of the above exception, another exception occurred: [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Traceback (most recent call last): [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 
1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._delete_instance(context, instance, bdms) [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._shutdown_instance(context, instance, bdms) [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._try_deallocate_network(context, instance, requested_networks) [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] with excutils.save_and_reraise_exception(): [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self.force_reraise() [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] raise self.value [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] _deallocate_network_with_retries() [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return evt.wait() [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1823.660351] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] result = hub.switch() [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return self.greenlet.switch() [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] result = func(*self.args, **self.kw) [ 1823.661465] env[68282]: ERROR nova.compute.manager 
[instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] result = f(*args, **kwargs) [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._deallocate_network( [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self.network_api.deallocate_for_instance( [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] data = neutron.list_ports(**search_opts) [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] ret = obj(*args, **kwargs) [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return self.list('ports', self.ports_path, retrieve_all, [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] ret = obj(*args, **kwargs) [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] for r in self._pagination(collection, path, **params): [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] res = self.get(path, params=params) [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] ret = obj(*args, **kwargs) [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 
121db530-a9de-4bb9-9d5a-0a88d9587881] return self.retry_request("GET", action, body=body, [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] ret = obj(*args, **kwargs) [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] return self.do_request(method, action, body=body, [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] ret = obj(*args, **kwargs) [ 1823.661465] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1823.663113] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] self._handle_fault_response(status_code, replybody, resp) [ 1823.663113] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1823.663113] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1823.663113] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1823.663113] env[68282]: ERROR nova.compute.manager [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] [ 1823.667526] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9830ad4-eb0c-47b6-9fd5-8e599be9133c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.682159] env[68282]: DEBUG nova.compute.provider_tree [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1823.687694] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "121db530-a9de-4bb9-9d5a-0a88d9587881" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.447s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.690345] env[68282]: DEBUG nova.scheduler.client.report [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1823.705524] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.391s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.706063] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1823.708701] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.228s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.747653] env[68282]: DEBUG nova.compute.utils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1823.748654] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1823.748824] env[68282]: DEBUG nova.network.neutron [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1823.757798] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1823.760875] env[68282]: INFO nova.compute.manager [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: 121db530-a9de-4bb9-9d5a-0a88d9587881] Successfully reverted task state from None on failure for instance. [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server [None req-e34de745-a494-472e-bba5-5d3c7144132b tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-45c478fb-591a-4caf-8ac1-24a58979279a'] [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1823.766691] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.767902] env[68282]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1823.767902] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1823.769077] env[68282]: ERROR oslo_messaging.rpc.server [ 1823.815028] env[68282]: DEBUG nova.policy [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7aee8a1e035742e0b67873bfcce2ef72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3d2a3fac4b04f8fa6622043de5e500d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1823.823411] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1823.845501] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1823.845786] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1823.845953] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1823.846150] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1823.846303] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1823.846450] env[68282]: DEBUG 
nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1823.846658] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1823.846823] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1823.846994] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1823.847189] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1823.847363] env[68282]: DEBUG nova.virt.hardware [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1823.848248] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d55f944-a1fb-49d1-9888-c687387b652a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.856969] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2185230-b639-434b-8fd2-a39c440ac365 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.911878] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5656c3-87c9-403b-9c3c-548963ad67ac {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.918667] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5833d62c-2de7-4f5d-bf37-fda4cf8f8117 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.949242] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b83329-4391-446a-a763-6ee2fc603a47 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.956305] env[68282]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb1be30-cd8f-4884-ae1f-a46acd381614 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.969518] env[68282]: DEBUG nova.compute.provider_tree [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1823.977802] env[68282]: DEBUG nova.scheduler.client.report [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1823.991460] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.283s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.992935] env[68282]: ERROR nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Traceback (most recent call last): [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] result = getattr(controller, method)(*args, **kwargs) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self._get(image_id) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] resp, body = self.http_client.get(url, headers=header) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self.request(url, 'GET', **kwargs) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self._handle_response(resp) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise exc.from_response(resp, resp.content) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] During handling of the above exception, another exception occurred: [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Traceback (most recent call last): [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self.driver.spawn(context, instance, image_meta, [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._fetch_image_if_missing(context, vi) [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1823.992935] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] image_fetch(context, vi, tmp_image_ds_loc) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] images.fetch_image( [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] metadata = IMAGE_API.get(context, image_ref) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return session.show(context, image_id, [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] _reraise_translated_image_exception(image_id) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise new_exc.with_traceback(exc_trace) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: 
a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] result = getattr(controller, method)(*args, **kwargs) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self._get(image_id) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] resp, body = self.http_client.get(url, headers=header) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self.request(url, 'GET', **kwargs) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self._handle_response(resp) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise exc.from_response(resp, resp.content) [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. [ 1823.993739] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1823.993739] env[68282]: DEBUG nova.compute.utils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
{{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1823.996446] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Build of instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c was re-scheduled: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1823.996916] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1823.997161] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.997313] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.997474] env[68282]: DEBUG nova.network.neutron [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1824.021301] env[68282]: DEBUG nova.network.neutron [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1824.085290] env[68282]: DEBUG nova.network.neutron [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.094285] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.094508] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1824.094688] env[68282]: DEBUG nova.compute.manager [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Skipping network deallocation for instance since networking was not requested. {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1824.199684] env[68282]: INFO nova.scheduler.client.report [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Deleted allocations for instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c [ 1824.222289] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9fd387b9-d5e1-44c8-8a60-ceb7b4d55ea6 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.286s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.222289] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.642s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.222289] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.222289] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.222289] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.224271] env[68282]: INFO nova.compute.manager [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Terminating instance [ 1824.226234] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquiring lock 
"refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.226568] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Acquired lock "refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.227194] env[68282]: DEBUG nova.network.neutron [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1824.236377] env[68282]: DEBUG nova.network.neutron [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Successfully created port: c88e2bb0-ddf4-4cd9-99df-47d24051e4f9 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1824.241115] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1824.260781] env[68282]: DEBUG nova.network.neutron [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1824.296682] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.296967] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.298974] env[68282]: INFO nova.compute.claims [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1824.334994] env[68282]: DEBUG nova.network.neutron [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.343787] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Releasing lock "refresh_cache-a1dd42c9-5466-46fa-911d-c3307ae3cf9c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.345726] env[68282]: DEBUG nova.compute.manager [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1824.346173] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1824.347040] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fac8a9bc-6058-4955-8c6c-a4bb22a7c842 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.359069] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7707fcdd-46f1-46b3-a36a-3daf0acec083 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.391699] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a1dd42c9-5466-46fa-911d-c3307ae3cf9c could not be found. [ 1824.391936] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1824.392244] env[68282]: INFO nova.compute.manager [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1824.392551] env[68282]: DEBUG oslo.service.loopingcall [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1824.392816] env[68282]: DEBUG nova.compute.manager [-] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1824.392939] env[68282]: DEBUG nova.network.neutron [-] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1824.498286] env[68282]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68282) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1824.498538] env[68282]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-62805cc7-41e8-498a-9c00-729f9c157837'] [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1824.499087] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1824.500293] env[68282]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1824.500293] env[68282]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1824.500293] env[68282]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1824.500293] env[68282]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1824.500293] env[68282]: ERROR oslo.service.loopingcall [ 1824.500293] env[68282]: ERROR nova.compute.manager [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1824.540832] env[68282]: ERROR nova.compute.manager [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Traceback (most recent call last): [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] ret = obj(*args, **kwargs) [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] exception_handler_v20(status_code, error_body) [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise client_exc(message=error_message, [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Neutron server returns request_ids: ['req-62805cc7-41e8-498a-9c00-729f9c157837'] [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] During handling of the above exception, another exception occurred: [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Traceback (most recent call last): [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File 
"/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._delete_instance(context, instance, bdms) [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._shutdown_instance(context, instance, bdms) [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._try_deallocate_network(context, instance, requested_networks) [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] with excutils.save_and_reraise_exception(): [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self.force_reraise() [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise self.value [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] _deallocate_network_with_retries() [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return evt.wait() [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1824.540832] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] result = hub.switch() [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self.greenlet.switch() [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] result = 
func(*self.args, **self.kw) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] result = f(*args, **kwargs) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._deallocate_network( [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self.network_api.deallocate_for_instance( [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] data = neutron.list_ports(**search_opts) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] ret = obj(*args, **kwargs) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self.list('ports', self.ports_path, retrieve_all, [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] ret = obj(*args, **kwargs) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] for r in self._pagination(collection, path, **params): [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] res = self.get(path, params=params) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] ret = obj(*args, **kwargs) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self.retry_request("GET", action, body=body, [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] ret = obj(*args, **kwargs) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] return self.do_request(method, action, body=body, [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] ret = obj(*args, **kwargs) [ 1824.541939] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1824.542907] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] self._handle_fault_response(status_code, replybody, resp) [ 1824.542907] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1824.542907] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1824.542907] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1824.542907] env[68282]: ERROR nova.compute.manager [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] [ 1824.548322] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d949fac0-2d4b-438a-9276-ea32fc4f911c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.556740] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167c29f0-6ffe-46a4-b7a8-cb9b0eabbdb8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.588170] env[68282]: DEBUG oslo_concurrency.lockutils [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Lock "a1dd42c9-5466-46fa-911d-c3307ae3cf9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.367s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.590116] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a59ea1c-60e5-4ae1-8ef1-95c318d7f5e4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.598673] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02992386-99f4-43f9-a1fb-f880bc65c62c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.614978] env[68282]: DEBUG nova.compute.provider_tree [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1824.623285] env[68282]: DEBUG nova.scheduler.client.report [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1824.641668] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.345s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.642175] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1824.655934] env[68282]: INFO nova.compute.manager [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] [instance: a1dd42c9-5466-46fa-911d-c3307ae3cf9c] Successfully reverted task state from None on failure for instance. [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server [None req-03025d34-9e94-4a7e-ba36-23fb65fb6854 tempest-ServerShowV247Test-547145538 tempest-ServerShowV247Test-547145538-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-62805cc7-41e8-498a-9c00-729f9c157837'] [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1824.659180] 
env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1824.659180] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 
1824.660498] env[68282]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1824.660498] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1824.661787] env[68282]: ERROR oslo_messaging.rpc.server [ 1824.674526] env[68282]: DEBUG nova.compute.utils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1824.675693] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1824.675873] env[68282]: DEBUG nova.network.neutron [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1824.690019] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1824.735673] env[68282]: DEBUG nova.policy [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd583e11a73b4a51abbada9d7175b77b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ad1efd6d477415b87929a724ff84973', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1824.763372] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1824.792345] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=<?>,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-17T04:46:53Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1824.792604] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1824.792873] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1824.793077] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1824.793227] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1824.793371] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1824.793575] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1824.793727] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1824.793886] env[68282]: DEBUG nova.virt.hardware [None 
req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1824.794570] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1824.794810] env[68282]: DEBUG nova.virt.hardware [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1824.795692] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54bf5c51-1c9a-419e-8de2-d9f28c8d4b44 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.804125] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa03d93-f9eb-4e28-aa1b-388467ff551f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.073349] env[68282]: DEBUG nova.network.neutron [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Successfully created port: 39dd5b9e-983f-4a41-92fa-0b2369ba7f2c {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1825.149376] env[68282]: DEBUG nova.compute.manager [req-3ce5eab8-9f1e-4752-98f3-c31d0f270e0f req-a5d55ba8-f659-4bf1-8e12-10d02baf6cb2 service nova] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Received event network-vif-plugged-c88e2bb0-ddf4-4cd9-99df-47d24051e4f9 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1825.149587] env[68282]: DEBUG oslo_concurrency.lockutils [req-3ce5eab8-9f1e-4752-98f3-c31d0f270e0f req-a5d55ba8-f659-4bf1-8e12-10d02baf6cb2 service nova] Acquiring lock "423ea779-232c-4662-acbd-9d2edec5867b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.149808] env[68282]: DEBUG oslo_concurrency.lockutils [req-3ce5eab8-9f1e-4752-98f3-c31d0f270e0f req-a5d55ba8-f659-4bf1-8e12-10d02baf6cb2 service nova] Lock "423ea779-232c-4662-acbd-9d2edec5867b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.149980] env[68282]: DEBUG oslo_concurrency.lockutils [req-3ce5eab8-9f1e-4752-98f3-c31d0f270e0f req-a5d55ba8-f659-4bf1-8e12-10d02baf6cb2 service nova] Lock "423ea779-232c-4662-acbd-9d2edec5867b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.150365] env[68282]: DEBUG nova.compute.manager 
[req-3ce5eab8-9f1e-4752-98f3-c31d0f270e0f req-a5d55ba8-f659-4bf1-8e12-10d02baf6cb2 service nova] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] No waiting events found dispatching network-vif-plugged-c88e2bb0-ddf4-4cd9-99df-47d24051e4f9 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1825.150552] env[68282]: WARNING nova.compute.manager [req-3ce5eab8-9f1e-4752-98f3-c31d0f270e0f req-a5d55ba8-f659-4bf1-8e12-10d02baf6cb2 service nova] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Received unexpected event network-vif-plugged-c88e2bb0-ddf4-4cd9-99df-47d24051e4f9 for instance with vm_state building and task_state spawning. [ 1825.227295] env[68282]: DEBUG nova.network.neutron [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Successfully updated port: c88e2bb0-ddf4-4cd9-99df-47d24051e4f9 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1825.246592] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "refresh_cache-423ea779-232c-4662-acbd-9d2edec5867b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.246754] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "refresh_cache-423ea779-232c-4662-acbd-9d2edec5867b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.246908] env[68282]: DEBUG nova.network.neutron [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1825.304246] env[68282]: DEBUG nova.network.neutron [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1825.571441] env[68282]: DEBUG nova.network.neutron [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Updating instance_info_cache with network_info: [{"id": "c88e2bb0-ddf4-4cd9-99df-47d24051e4f9", "address": "fa:16:3e:ed:90:3d", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc88e2bb0-dd", "ovs_interfaceid": "c88e2bb0-ddf4-4cd9-99df-47d24051e4f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.586916] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "refresh_cache-423ea779-232c-4662-acbd-9d2edec5867b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.587263] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Instance network_info: |[{"id": "c88e2bb0-ddf4-4cd9-99df-47d24051e4f9", "address": "fa:16:3e:ed:90:3d", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc88e2bb0-dd", "ovs_interfaceid": "c88e2bb0-ddf4-4cd9-99df-47d24051e4f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1825.587812] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:90:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b911797-478d-4ee5-bce9-6f2f49014e94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c88e2bb0-ddf4-4cd9-99df-47d24051e4f9', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1825.595724] env[68282]: DEBUG oslo.service.loopingcall [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.597845] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1825.598103] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e924890c-5169-4b29-ac5a-76b460188334 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.620776] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1825.620776] env[68282]: value = "task-3470600" [ 1825.620776] env[68282]: _type = "Task" [ 1825.620776] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.628975] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470600, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.846253] env[68282]: DEBUG nova.compute.manager [req-5a2be396-9656-4726-9831-fd6ad8c11db6 req-6470b926-ea33-45bd-9327-b3ea9615dd96 service nova] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Received event network-vif-plugged-39dd5b9e-983f-4a41-92fa-0b2369ba7f2c {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1825.846488] env[68282]: DEBUG oslo_concurrency.lockutils [req-5a2be396-9656-4726-9831-fd6ad8c11db6 req-6470b926-ea33-45bd-9327-b3ea9615dd96 service nova] Acquiring lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.846702] env[68282]: DEBUG oslo_concurrency.lockutils [req-5a2be396-9656-4726-9831-fd6ad8c11db6 req-6470b926-ea33-45bd-9327-b3ea9615dd96 service nova] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.846873] env[68282]: DEBUG oslo_concurrency.lockutils [req-5a2be396-9656-4726-9831-fd6ad8c11db6 req-6470b926-ea33-45bd-9327-b3ea9615dd96 service nova] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.847294] env[68282]: DEBUG nova.compute.manager [req-5a2be396-9656-4726-9831-fd6ad8c11db6 req-6470b926-ea33-45bd-9327-b3ea9615dd96 service nova] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] No waiting events found dispatching network-vif-plugged-39dd5b9e-983f-4a41-92fa-0b2369ba7f2c {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1825.847577] env[68282]: WARNING nova.compute.manager [req-5a2be396-9656-4726-9831-fd6ad8c11db6 req-6470b926-ea33-45bd-9327-b3ea9615dd96 service nova] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Received unexpected event network-vif-plugged-39dd5b9e-983f-4a41-92fa-0b2369ba7f2c for instance with vm_state building and task_state spawning. 
[ 1825.894290] env[68282]: DEBUG nova.network.neutron [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Successfully updated port: 39dd5b9e-983f-4a41-92fa-0b2369ba7f2c {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1825.905816] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring lock "refresh_cache-e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.905968] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquired lock "refresh_cache-e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.906160] env[68282]: DEBUG nova.network.neutron [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1825.947865] env[68282]: DEBUG nova.network.neutron [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1826.109658] env[68282]: DEBUG nova.network.neutron [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Updating instance_info_cache with network_info: [{"id": "39dd5b9e-983f-4a41-92fa-0b2369ba7f2c", "address": "fa:16:3e:e9:4f:40", "network": {"id": "0c3d70ae-5d2d-46a5-b2b7-4261bf9375dd", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1236679250-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ad1efd6d477415b87929a724ff84973", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39dd5b9e-98", "ovs_interfaceid": "39dd5b9e-983f-4a41-92fa-0b2369ba7f2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.126925] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 
tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Releasing lock "refresh_cache-e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.127255] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Instance network_info: |[{"id": "39dd5b9e-983f-4a41-92fa-0b2369ba7f2c", "address": "fa:16:3e:e9:4f:40", "network": {"id": "0c3d70ae-5d2d-46a5-b2b7-4261bf9375dd", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1236679250-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ad1efd6d477415b87929a724ff84973", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39dd5b9e-98", "ovs_interfaceid": "39dd5b9e-983f-4a41-92fa-0b2369ba7f2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1826.127677] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:4f:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39dd5b9e-983f-4a41-92fa-0b2369ba7f2c', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1826.134939] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Creating folder: Project (5ad1efd6d477415b87929a724ff84973). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1826.138478] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-945af89e-41f7-4582-8f26-1d355d83ce8b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.140149] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470600, 'name': CreateVM_Task, 'duration_secs': 0.286139} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.140305] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1826.141456] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.141621] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.141946] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1826.142200] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6395a970-69dc-4cfd-a3f6-d580347d07ab {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.146780] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 1826.146780] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52c91edd-dd76-ed52-9455-8d6ae1cc90ad" [ 1826.146780] env[68282]: _type = "Task" [ 1826.146780] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.147941] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Created folder: Project (5ad1efd6d477415b87929a724ff84973) in parent group-v693573. [ 1826.148130] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Creating folder: Instances. Parent ref: group-v693671. 
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1826.151562] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b07d0b7f-d108-4407-bc55-e8d0612df1d4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.157023] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52c91edd-dd76-ed52-9455-8d6ae1cc90ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.158969] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Created folder: Instances in parent group-v693671. [ 1826.159210] env[68282]: DEBUG oslo.service.loopingcall [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1826.159387] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1826.159577] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-480fe73b-eefa-42d6-b114-628fe20beb39 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.177212] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1826.177212] env[68282]: value = "task-3470603" [ 1826.177212] env[68282]: _type = "Task" [ 1826.177212] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.185685] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470603, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.657876] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.658177] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1826.658358] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.685477] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470603, 'name': CreateVM_Task, 'duration_secs': 0.27529} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.685621] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1826.686241] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.686402] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.686701] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1826.686931] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-083f2575-73ee-42e1-a7ea-327a2b4373e1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.690895] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Waiting for the task: (returnval){ [ 
1826.690895] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ddfa5d-f618-2a1f-d16f-5f4cc98cb27e" [ 1826.690895] env[68282]: _type = "Task" [ 1826.690895] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.698100] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52ddfa5d-f618-2a1f-d16f-5f4cc98cb27e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.174971] env[68282]: DEBUG nova.compute.manager [req-3f0ebfff-b576-4f24-82c0-4c0b8bb99e9c req-5e32d1e7-cc72-440e-aa1b-4f618784e81a service nova] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Received event network-changed-c88e2bb0-ddf4-4cd9-99df-47d24051e4f9 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1827.175184] env[68282]: DEBUG nova.compute.manager [req-3f0ebfff-b576-4f24-82c0-4c0b8bb99e9c req-5e32d1e7-cc72-440e-aa1b-4f618784e81a service nova] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Refreshing instance network info cache due to event network-changed-c88e2bb0-ddf4-4cd9-99df-47d24051e4f9. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1827.175402] env[68282]: DEBUG oslo_concurrency.lockutils [req-3f0ebfff-b576-4f24-82c0-4c0b8bb99e9c req-5e32d1e7-cc72-440e-aa1b-4f618784e81a service nova] Acquiring lock "refresh_cache-423ea779-232c-4662-acbd-9d2edec5867b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.175540] env[68282]: DEBUG oslo_concurrency.lockutils [req-3f0ebfff-b576-4f24-82c0-4c0b8bb99e9c req-5e32d1e7-cc72-440e-aa1b-4f618784e81a service nova] Acquired lock "refresh_cache-423ea779-232c-4662-acbd-9d2edec5867b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.175713] env[68282]: DEBUG nova.network.neutron [req-3f0ebfff-b576-4f24-82c0-4c0b8bb99e9c req-5e32d1e7-cc72-440e-aa1b-4f618784e81a service nova] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Refreshing network info cache for port c88e2bb0-ddf4-4cd9-99df-47d24051e4f9 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1827.201192] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.201346] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1827.201563] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.441474] env[68282]: DEBUG nova.network.neutron [req-3f0ebfff-b576-4f24-82c0-4c0b8bb99e9c req-5e32d1e7-cc72-440e-aa1b-4f618784e81a service nova] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Updated VIF entry in instance network info cache for port c88e2bb0-ddf4-4cd9-99df-47d24051e4f9. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1827.441839] env[68282]: DEBUG nova.network.neutron [req-3f0ebfff-b576-4f24-82c0-4c0b8bb99e9c req-5e32d1e7-cc72-440e-aa1b-4f618784e81a service nova] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Updating instance_info_cache with network_info: [{"id": "c88e2bb0-ddf4-4cd9-99df-47d24051e4f9", "address": "fa:16:3e:ed:90:3d", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc88e2bb0-dd", "ovs_interfaceid": "c88e2bb0-ddf4-4cd9-99df-47d24051e4f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.451779] env[68282]: DEBUG oslo_concurrency.lockutils [req-3f0ebfff-b576-4f24-82c0-4c0b8bb99e9c req-5e32d1e7-cc72-440e-aa1b-4f618784e81a service nova] Releasing lock "refresh_cache-423ea779-232c-4662-acbd-9d2edec5867b" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.872992] env[68282]: DEBUG nova.compute.manager [req-678f5a0b-fe88-41a3-8b3b-2f7730a0e720 req-b1dbf602-d252-4579-951f-590257ab7889 service nova] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Received event network-changed-39dd5b9e-983f-4a41-92fa-0b2369ba7f2c {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1827.873258] env[68282]: DEBUG nova.compute.manager [req-678f5a0b-fe88-41a3-8b3b-2f7730a0e720 req-b1dbf602-d252-4579-951f-590257ab7889 service nova] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Refreshing instance network info cache due to event network-changed-39dd5b9e-983f-4a41-92fa-0b2369ba7f2c. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1827.873434] env[68282]: DEBUG oslo_concurrency.lockutils [req-678f5a0b-fe88-41a3-8b3b-2f7730a0e720 req-b1dbf602-d252-4579-951f-590257ab7889 service nova] Acquiring lock "refresh_cache-e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.873580] env[68282]: DEBUG oslo_concurrency.lockutils [req-678f5a0b-fe88-41a3-8b3b-2f7730a0e720 req-b1dbf602-d252-4579-951f-590257ab7889 service nova] Acquired lock "refresh_cache-e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1827.873743] env[68282]: DEBUG nova.network.neutron [req-678f5a0b-fe88-41a3-8b3b-2f7730a0e720 req-b1dbf602-d252-4579-951f-590257ab7889 service nova] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Refreshing network info cache for port 39dd5b9e-983f-4a41-92fa-0b2369ba7f2c {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1828.132388] env[68282]: DEBUG nova.network.neutron [req-678f5a0b-fe88-41a3-8b3b-2f7730a0e720 req-b1dbf602-d252-4579-951f-590257ab7889 service nova] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Updated VIF entry in instance network info cache for port 39dd5b9e-983f-4a41-92fa-0b2369ba7f2c. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1828.132751] env[68282]: DEBUG nova.network.neutron [req-678f5a0b-fe88-41a3-8b3b-2f7730a0e720 req-b1dbf602-d252-4579-951f-590257ab7889 service nova] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Updating instance_info_cache with network_info: [{"id": "39dd5b9e-983f-4a41-92fa-0b2369ba7f2c", "address": "fa:16:3e:e9:4f:40", "network": {"id": "0c3d70ae-5d2d-46a5-b2b7-4261bf9375dd", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1236679250-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ad1efd6d477415b87929a724ff84973", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39dd5b9e-98", "ovs_interfaceid": "39dd5b9e-983f-4a41-92fa-0b2369ba7f2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.142520] env[68282]: DEBUG oslo_concurrency.lockutils [req-678f5a0b-fe88-41a3-8b3b-2f7730a0e720 req-b1dbf602-d252-4579-951f-590257ab7889 service nova] Releasing lock "refresh_cache-e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.087731] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1864.103017] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.103017] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.103017] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.103017] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1864.105314] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f912343-8211-4ca4-baea-fe138767b466 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.114102] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d5ff5a-ba6b-4ea1-8d2d-16755a33e1e7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.129319] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a34d471-d38a-4fae-9420-8907968dbeaa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.135766] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b01b8c-e334-415e-a11c-01fdc59f24b6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.163971] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180946MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1864.164141] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.164341] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.243149] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.243327] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 9874370f-917a-412b-91ce-a92e73d6ac0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.243458] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.243582] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.243702] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.243823] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.243940] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.244068] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.244187] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 423ea779-232c-4662-acbd-9d2edec5867b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.244302] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1864.256303] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1864.256521] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1864.256666] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1864.392728] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174f6850-0cc7-42d6-a093-ef72bef2c527 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.400240] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23e0d5a-8314-45a5-854f-0198a437af14 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.431106] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7b384e-b3a4-4107-bb87-52284ae04daa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.438212] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7028e3c-f2ba-4bb7-adf6-00ace930a9af {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.451183] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1864.463585] env[68282]: DEBUG nova.scheduler.client.report [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1864.477248] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1864.477443] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.313s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.089073] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1866.089073] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11263}} [ 1866.100683] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] There are 0 instances to clean {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11272}} [ 1866.100894] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1867.106661] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1867.106967] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1867.106967] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1867.126891] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.127028] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.127160] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.127289] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.127414] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.127537] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.127658] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.127779] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.127940] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.128085] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1867.128213] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1868.087516] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1868.107882] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1868.107882] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1870.104054] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1871.087564] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1871.098826] env[68282]: WARNING oslo_vmware.rw_handles [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1871.098826] env[68282]: ERROR oslo_vmware.rw_handles [ 1871.099179] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1871.101352] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1871.101632] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Copying Virtual Disk [datastore2] vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/dda0121c-2cd5-4fec-945a-2305d13839d7/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1871.101975] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0866349-ec0a-4dc7-be8a-3b395f8fd5a7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.110431] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Waiting for the task: (returnval){ [ 1871.110431] env[68282]: value = "task-3470604" [ 1871.110431] env[68282]: _type = "Task" [ 1871.110431] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.118847] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Task: {'id': task-3470604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.620522] env[68282]: DEBUG oslo_vmware.exceptions [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1871.620763] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.621336] env[68282]: ERROR nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1871.621336] env[68282]: Faults: ['InvalidArgument'] [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Traceback (most recent call last): [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] yield resources [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] self.driver.spawn(context, instance, image_meta, [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] self._fetch_image_if_missing(context, vi) [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] image_cache(vi, tmp_image_ds_loc) [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] vm_util.copy_virtual_disk( [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] session._wait_for_task(vmdk_copy_task) [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] return self.wait_for_task(task_ref) [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] return evt.wait() [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] result = hub.switch() [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] return self.greenlet.switch() [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] self.f(*self.args, **self.kw) [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] raise exceptions.translate_fault(task_info.error) [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Faults: ['InvalidArgument'] [ 1871.621336] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] [ 1871.622284] env[68282]: INFO nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Terminating instance [ 1871.623254] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.623461] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1871.623694] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-408d2ea4-53bf-46bf-809f-31709b9dba32 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.626041] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1871.626245] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1871.626950] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b15a4d-a43f-4e26-87ad-5275d4062d3f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.633387] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1871.633595] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef29b46f-b5df-4fa7-ae2f-3b25ee187941 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.635631] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1871.635877] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1871.636795] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb84f83a-5938-457d-a128-6b35f8cb02e9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.642536] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 1871.642536] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52191d89-df5e-0be5-e26c-512ba05180a8" [ 1871.642536] env[68282]: _type = "Task" [ 1871.642536] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.649514] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52191d89-df5e-0be5-e26c-512ba05180a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.727312] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1871.727630] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1871.727952] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Deleting the datastore file [datastore2] 16824286-3e71-4f49-8a6e-93f10ec668d6 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1871.728274] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-740849db-31b0-4b88-b71a-461a1fbd32df {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.735174] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Waiting for the task: (returnval){ [ 1871.735174] env[68282]: value = "task-3470606" [ 1871.735174] env[68282]: _type = "Task" [ 1871.735174] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.743060] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Task: {'id': task-3470606, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.088350] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1872.088350] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1872.088350] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1872.154031] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1872.154031] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating directory with path [datastore2] vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1872.154031] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68ad1ba8-4291-46ff-a908-268a10cc5d6d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.164885] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created directory with path [datastore2] vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1872.165099] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Fetch image to [datastore2] vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1872.165281] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1872.166024] env[68282]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4674ac53-d224-4ede-92af-3715c79be157 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.172659] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce75bfb-3210-4318-9176-9c3dbc78407d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.181660] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965c4d94-4761-4201-ac5d-709d05488fe5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.211991] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12b0013-cca7-414a-8c20-492c0923150c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.217871] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-898657cb-077d-49b2-a904-189f8cc1ac57 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.242084] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1872.246990] env[68282]: DEBUG oslo_vmware.api [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Task: {'id': task-3470606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067362} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.247335] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1872.247525] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1872.247702] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1872.248199] env[68282]: INFO nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1872.249908] env[68282]: DEBUG nova.compute.claims [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1872.250099] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.250312] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.295655] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1872.356258] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1872.356462] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1872.474815] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c0085d-2fd7-4277-a682-a575a1d671d0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.482250] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8fbcb3-903f-449c-b055-b7f37f6df14b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.513722] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6a006b-6451-4125-be11-6b2899fdc833 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.522073] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb410f9-6bc0-42dc-a88e-8be3f3cfba1a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.534877] env[68282]: DEBUG nova.compute.provider_tree [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.546861] env[68282]: DEBUG nova.scheduler.client.report [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1872.562434] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.312s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.562997] env[68282]: ERROR nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1872.562997] env[68282]: Faults: ['InvalidArgument'] [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Traceback (most recent call last): [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1872.562997] 
env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] self.driver.spawn(context, instance, image_meta, [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] self._fetch_image_if_missing(context, vi) [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] image_cache(vi, tmp_image_ds_loc) [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] vm_util.copy_virtual_disk( [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] session._wait_for_task(vmdk_copy_task) [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] return self.wait_for_task(task_ref) [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] return evt.wait() [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] result = hub.switch() [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] return self.greenlet.switch() [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] self.f(*self.args, **self.kw) [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] raise exceptions.translate_fault(task_info.error) [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Faults: ['InvalidArgument'] [ 1872.562997] env[68282]: ERROR nova.compute.manager [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] [ 1872.564075] env[68282]: DEBUG nova.compute.utils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1872.565142] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Build of instance 16824286-3e71-4f49-8a6e-93f10ec668d6 was re-scheduled: A specified parameter was not correct: fileType [ 1872.565142] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1872.565515] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1872.565685] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1872.565860] env[68282]: DEBUG nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1872.566034] env[68282]: DEBUG nova.network.neutron [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1872.942045] env[68282]: DEBUG nova.network.neutron [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.955765] env[68282]: INFO nova.compute.manager [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Took 0.39 seconds to deallocate network for instance. [ 1873.049063] env[68282]: INFO nova.scheduler.client.report [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Deleted allocations for instance 16824286-3e71-4f49-8a6e-93f10ec668d6 [ 1873.067792] env[68282]: DEBUG oslo_concurrency.lockutils [None req-acb47bd9-e86c-4c7a-aae0-1570d078f0e8 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "16824286-3e71-4f49-8a6e-93f10ec668d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 620.055s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.068976] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "16824286-3e71-4f49-8a6e-93f10ec668d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 424.735s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.069246] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Acquiring lock "16824286-3e71-4f49-8a6e-93f10ec668d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.069465] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "16824286-3e71-4f49-8a6e-93f10ec668d6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.069647] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "16824286-3e71-4f49-8a6e-93f10ec668d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.071666] env[68282]: INFO nova.compute.manager [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Terminating instance [ 1873.074179] env[68282]: DEBUG nova.compute.manager [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1873.074432] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1873.074944] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-945ff8d8-7584-4c5c-862c-d9fd1748b1c4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.084476] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4055b3-36c4-4bd1-9084-5146afabcfd3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.096269] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1873.096578] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1873.117753] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 16824286-3e71-4f49-8a6e-93f10ec668d6 could not be found. 
[ 1873.118012] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1873.118213] env[68282]: INFO nova.compute.manager [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1873.118466] env[68282]: DEBUG oslo.service.loopingcall [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1873.118701] env[68282]: DEBUG nova.compute.manager [-] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1873.118799] env[68282]: DEBUG nova.network.neutron [-] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1873.144813] env[68282]: DEBUG nova.network.neutron [-] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.158305] env[68282]: INFO nova.compute.manager [-] [instance: 16824286-3e71-4f49-8a6e-93f10ec668d6] Took 0.04 seconds to deallocate network for instance. 
[ 1873.174399] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.174661] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.177218] env[68282]: INFO nova.compute.claims [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1873.265806] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4356f6ab-9dfe-430c-9362-6a9ed5603aa6 tempest-ServerActionsTestOtherB-1326038298 tempest-ServerActionsTestOtherB-1326038298-project-member] Lock "16824286-3e71-4f49-8a6e-93f10ec668d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.364048] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912b79b2-8dbb-4792-8e4d-3692b485cfb8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.372439] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46e89b0-dfcd-4297-8deb-824fc46a477d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.402235] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107bc112-a0b8-4de9-a890-e31403b947c1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.409620] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939d3e6b-b5e1-45c3-927a-1efff0e57119 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.423992] env[68282]: DEBUG nova.compute.provider_tree [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1873.432602] env[68282]: DEBUG nova.scheduler.client.report [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1873.447210] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.272s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.447657] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1873.485336] env[68282]: DEBUG nova.compute.utils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1873.486686] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Not allocating networking since 'none' was specified. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1873.495939] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1873.558587] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1873.585483] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1873.585732] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1873.585934] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1873.586150] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1873.586302] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1873.586452] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1873.586663] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1873.586824] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1873.586995] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca 
tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1873.587188] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1873.587368] env[68282]: DEBUG nova.virt.hardware [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1873.588285] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7b8d4b-461e-4cb2-a187-badde13f2222 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.596169] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc8147c-05be-4a9a-aa93-6cdd652aadaf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.610135] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance VIF info [] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1873.615663] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Creating folder: Project (0c27ee8b3416478bb357525ae675ff65). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1873.615970] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9450b632-71dd-4e28-864c-9c8a062e8143 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.626034] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Created folder: Project (0c27ee8b3416478bb357525ae675ff65) in parent group-v693573. [ 1873.626237] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Creating folder: Instances. Parent ref: group-v693674. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1873.626455] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c159f9f4-ade6-4257-a835-ee6452b173f7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.634203] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Created folder: Instances in parent group-v693674. 
[ 1873.634430] env[68282]: DEBUG oslo.service.loopingcall [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1873.634609] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1873.634795] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbad0470-4501-4fe5-9fc9-5446160d5f6a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.650246] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1873.650246] env[68282]: value = "task-3470609" [ 1873.650246] env[68282]: _type = "Task" [ 1873.650246] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.657265] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470609, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.160860] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470609, 'name': CreateVM_Task, 'duration_secs': 0.244102} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.161313] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1874.161509] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.161685] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.162035] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1874.162291] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82ed239b-41d2-4a17-aed6-5fdc29d91a20 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.166662] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Waiting for the task: 
(returnval){ [ 1874.166662] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5239c549-f8ea-2bba-4081-03ef362f59b2" [ 1874.166662] env[68282]: _type = "Task" [ 1874.166662] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.179369] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5239c549-f8ea-2bba-4081-03ef362f59b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.677756] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.677756] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1874.677756] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.087620] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1877.087907] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances with incomplete migration {{(pid=68282) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11301}} [ 1893.555024] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_power_states {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.580636] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Getting list of instances from cluster (obj){ [ 1893.580636] env[68282]: value = "domain-c8" [ 1893.580636] env[68282]: _type = "ClusterComputeResource" [ 1893.580636] env[68282]: } {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1893.581888] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e764ba8-a23d-4345-923c-673905335f62 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.598788] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Got total of 10 instances {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1893.598961] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 9874370f-917a-412b-91ce-a92e73d6ac0d {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.599220] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 0e8afd42-0759-41c0-892a-c4f852d5d3e4 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.599391] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid a9d0de25-ef21-4725-a6c1-f6fac2593bb9 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.599548] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 7bc5117e-58d1-4c08-b778-7045b1076b94 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.599701] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 5c2d229f-e14c-43b8-80d1-9232557b7520 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.599854] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.600012] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid aff4995e-4c8f-4ced-8743-e6cac0484875 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.600174] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 423ea779-232c-4662-acbd-9d2edec5867b {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.600324] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.600470] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 1893.600799] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "9874370f-917a-412b-91ce-a92e73d6ac0d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.601045] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.601273] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.601474] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "7bc5117e-58d1-4c08-b778-7045b1076b94" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.601810] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "5c2d229f-e14c-43b8-80d1-9232557b7520" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.601893] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.602063] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "aff4995e-4c8f-4ced-8743-e6cac0484875" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.602269] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "423ea779-232c-4662-acbd-9d2edec5867b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.602460] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.602651] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.990074] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.990388] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 
tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.528738] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "423ea779-232c-4662-acbd-9d2edec5867b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.561035] env[68282]: WARNING oslo_vmware.rw_handles [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1921.561035] env[68282]: ERROR oslo_vmware.rw_handles [ 1921.561035] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1921.563246] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1921.563498] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/57c6bb0d-ea04-491b-8735-099877e7d393/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1921.564012] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12dce329-cfe5-4dba-8cee-e16572025526 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.571916] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 1921.571916] env[68282]: value = "task-3470610" [ 1921.571916] env[68282]: _type = "Task" [ 1921.571916] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.579782] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.082377] env[68282]: DEBUG oslo_vmware.exceptions [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1922.082643] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.083198] env[68282]: ERROR nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1922.083198] env[68282]: Faults: ['InvalidArgument'] [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Traceback (most recent call last): [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] yield resources [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] self.driver.spawn(context, instance, image_meta, [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 
9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] self._fetch_image_if_missing(context, vi) [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] image_cache(vi, tmp_image_ds_loc) [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] vm_util.copy_virtual_disk( [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] session._wait_for_task(vmdk_copy_task) [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] return self.wait_for_task(task_ref) [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] return evt.wait() [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] result = hub.switch() [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] return self.greenlet.switch() [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] self.f(*self.args, **self.kw) [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] raise 
exceptions.translate_fault(task_info.error) [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Faults: ['InvalidArgument'] [ 1922.083198] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] [ 1922.084387] env[68282]: INFO nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Terminating instance [ 1922.085035] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.085266] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1922.085496] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1deffd8a-fd20-4c5b-a4b8-62d2db70a6c6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.087571] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1922.087768] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1922.088512] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0bfab7-033f-4a44-92c8-dfea31b7ea83 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.095028] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1922.095234] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f87ebca9-9e4b-444b-865f-66c421aaee08 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.097324] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1922.097499] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1922.098415] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81b856f6-6160-42fa-9c08-2b7a998ed3a3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.102926] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Waiting for the task: (returnval){ [ 1922.102926] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523a41e1-06cb-927b-0cf9-4a00591dbfe4" [ 1922.102926] env[68282]: _type = "Task" [ 1922.102926] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.111510] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523a41e1-06cb-927b-0cf9-4a00591dbfe4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.165460] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1922.165708] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1922.165892] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleting the datastore file [datastore2] 9874370f-917a-412b-91ce-a92e73d6ac0d {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1922.166173] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dea921de-46da-492d-8dd4-8a3fac791cfa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.172471] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 1922.172471] env[68282]: value = "task-3470612" [ 1922.172471] env[68282]: _type = "Task" [ 1922.172471] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.179893] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470612, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.613601] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1922.614109] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Creating directory with path [datastore2] vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1922.614211] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-feac2fa1-3e1d-4324-b0e2-76855055582c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.625066] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Created directory with path [datastore2] vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1922.625261] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Fetch image to [datastore2] vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1922.625436] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1922.626171] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10effe19-a5fc-43ec-8538-90a6394e0ab3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.632816] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6308fb9-f1d7-4da6-851c-c1144799f4b4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.641840] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f51e36-e2fb-4a80-be83-6d7009dab5d2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.671952] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0ad3efd8-32be-4394-8299-67f006e753a6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.682805] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6c8b1d18-45c9-40dc-acb6-7f390acf1b09 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.684505] env[68282]: DEBUG oslo_vmware.api [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065574} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.684750] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1922.684931] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1922.685119] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1922.685295] env[68282]: INFO nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Took 0.60 seconds to destroy the instance on the hypervisor. 
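
The teardown above follows the usual oslo.vmware pattern of invoking an asynchronous vCenter *_Task method and then polling it (FileManager.DeleteDatastoreFile_Task followed by wait_for_task). A minimal sketch of that pattern, assuming an already-established VMwareAPISession and placeholder datastore-path/datacenter arguments (this is illustrative, not the driver's own helper):

from oslo_vmware import exceptions as vexc

def delete_datastore_file(session, ds_path, dc_ref):
    # Kick off the asynchronous vCenter task, mirroring the
    # FileManager.DeleteDatastoreFile_Task invocation logged above.
    vim = session.vim
    task = session.invoke_api(vim, 'DeleteDatastoreFile_Task',
                              vim.service_content.fileManager,
                              name=ds_path, datacenter=dc_ref)
    try:
        # Blocks until the task completes; returns the task info on success.
        return session.wait_for_task(task)
    except vexc.VimFaultException:
        # A failed task surfaces here with its fault list,
        # e.g. Faults: ['InvalidArgument'] as seen elsewhere in this log.
        raise
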
[ 1922.687444] env[68282]: DEBUG nova.compute.claims [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1922.687619] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.687832] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.705936] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1922.758841] env[68282]: DEBUG oslo_vmware.rw_handles [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1922.820338] env[68282]: DEBUG oslo_vmware.rw_handles [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1922.820542] env[68282]: DEBUG oslo_vmware.rw_handles [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1922.822191] env[68282]: DEBUG nova.scheduler.client.report [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Refreshing inventories for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1922.837766] env[68282]: DEBUG nova.scheduler.client.report [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Updating ProviderTree inventory for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1922.837977] env[68282]: DEBUG nova.compute.provider_tree [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Updating inventory in ProviderTree for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1922.848527] env[68282]: DEBUG nova.scheduler.client.report [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Refreshing aggregate associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, aggregates: None {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1922.865230] env[68282]: DEBUG nova.scheduler.client.report [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Refreshing trait associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1922.984213] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45171ddb-2cb5-4aa3-8bad-5fe3f8b770ba {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.992666] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e9596a-c66a-406a-b093-f1d861dd2b4b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.021719] env[68282]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61e9f72-fffc-473c-9b82-dceacefc42a9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.028420] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b302dc-3443-4560-9e63-3cc275ead532 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.040969] env[68282]: DEBUG nova.compute.provider_tree [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1923.049149] env[68282]: DEBUG nova.scheduler.client.report [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1923.065176] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.377s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.065709] env[68282]: ERROR nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1923.065709] env[68282]: Faults: ['InvalidArgument'] [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Traceback (most recent call last): [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] self.driver.spawn(context, instance, image_meta, [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1923.065709] env[68282]: ERROR 
nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] self._fetch_image_if_missing(context, vi) [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] image_cache(vi, tmp_image_ds_loc) [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] vm_util.copy_virtual_disk( [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] session._wait_for_task(vmdk_copy_task) [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] return self.wait_for_task(task_ref) [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] return evt.wait() [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] result = hub.switch() [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] return self.greenlet.switch() [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] self.f(*self.args, **self.kw) [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] raise exceptions.translate_fault(task_info.error) [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Faults: ['InvalidArgument'] [ 1923.065709] env[68282]: ERROR nova.compute.manager [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] [ 1923.066771] 
env[68282]: DEBUG nova.compute.utils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1923.067778] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Build of instance 9874370f-917a-412b-91ce-a92e73d6ac0d was re-scheduled: A specified parameter was not correct: fileType [ 1923.067778] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1923.068183] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1923.068392] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1923.068573] env[68282]: DEBUG nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1923.068752] env[68282]: DEBUG nova.network.neutron [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1923.374985] env[68282]: DEBUG nova.network.neutron [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.387013] env[68282]: INFO nova.compute.manager [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Took 0.32 seconds to deallocate network for instance. 
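
The traceback above ends in oslo.vmware's task poller translating the failed disk-copy task into a VimFaultException ('InvalidArgument' on fileType), which is why _build_and_run_instance reschedules the build. A simplified sketch of that polling step, with names taken from the traceback rather than the full oslo.vmware implementation:

from oslo_vmware import exceptions, vim_util

def poll_task_once(session, task_ref):
    # Read the TaskInfo property for the task reference.
    task_info = session.invoke_api(vim_util, 'get_object_property',
                                   session.vim, task_ref, 'info')
    if task_info.state == 'success':
        return task_info
    if task_info.state == 'error':
        # translate_fault() turns the server-side fault into the
        # VimFaultException seen above (Faults: ['InvalidArgument']).
        raise exceptions.translate_fault(task_info.error)
    return None  # queued/running: the caller keeps polling
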
[ 1923.507014] env[68282]: INFO nova.scheduler.client.report [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleted allocations for instance 9874370f-917a-412b-91ce-a92e73d6ac0d [ 1923.530181] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a79ac20b-4784-4790-aac5-5e4c044ce2fa tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 590.332s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.531384] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 394.890s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.531883] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "9874370f-917a-412b-91ce-a92e73d6ac0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.531883] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.532538] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.534034] env[68282]: INFO nova.compute.manager [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Terminating instance [ 1923.535644] env[68282]: DEBUG nova.compute.manager [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1923.535830] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1923.538339] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b36d2f1a-86d5-4011-8e77-4a3754c16515 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.545667] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a209c2-0636-4a15-8212-31d2a3931416 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.563411] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1923.585771] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9874370f-917a-412b-91ce-a92e73d6ac0d could not be found. [ 1923.586610] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1923.586901] env[68282]: INFO nova.compute.manager [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1923.587178] env[68282]: DEBUG oslo.service.loopingcall [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1923.587445] env[68282]: DEBUG nova.compute.manager [-] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1923.587538] env[68282]: DEBUG nova.network.neutron [-] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1923.614163] env[68282]: DEBUG nova.network.neutron [-] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.623133] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.625530] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.625530] env[68282]: INFO nova.compute.claims [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1923.628409] env[68282]: INFO nova.compute.manager [-] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] Took 0.04 seconds to deallocate network for instance. [ 1923.711084] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4cc1e4c5-f2da-4855-a785-e55b7b727236 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.712511] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 30.112s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.712709] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 9874370f-917a-412b-91ce-a92e73d6ac0d] During sync_power_state the instance has a pending task (deleting). Skip. 
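
The inventory the report client refreshes above and the claim made for instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 both work against the usual placement capacity arithmetic: usable capacity per resource class is (total - reserved) * allocation_ratio. A worked example using the figures logged for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e:

# Inventory values copied from the log entries above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU -> 192, MEMORY_MB -> 196078, DISK_GB -> 400
    print(rc, 'capacity =', capacity)
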
[ 1923.712887] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "9874370f-917a-412b-91ce-a92e73d6ac0d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.797953] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9b20f9-b071-4b1b-aade-b49282128d5e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.805675] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3294dde6-fedf-4626-a162-90816c9930e4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.836636] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f03fedf-5cec-44c5-b089-654520896618 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.844082] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cde682a-7b27-47e4-a3ce-fa07c4d44f8d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.856916] env[68282]: DEBUG nova.compute.provider_tree [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1923.865645] env[68282]: DEBUG nova.scheduler.client.report [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1923.882117] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.882592] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1923.914744] env[68282]: DEBUG nova.compute.utils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1923.916593] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1923.916772] env[68282]: DEBUG nova.network.neutron [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1923.924667] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1923.972364] env[68282]: DEBUG nova.policy [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeff02086d114be7816a6d2558c9c8fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ea2948d9c0a046a09077c014de41faeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1923.986811] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1924.011973] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1924.012240] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1924.012404] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1924.012587] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1924.012735] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1924.012885] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1924.013103] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1924.013269] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1924.013439] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Got 1 
possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1924.013602] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1924.013775] env[68282]: DEBUG nova.virt.hardware [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1924.014632] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96939f35-40fa-4574-9da3-00cf7f89badd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.022634] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd940e8-4067-431c-8cbb-a8fed474a2db {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.258391] env[68282]: DEBUG nova.network.neutron [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Successfully created port: ec213290-bc57-4b11-be96-72f8cd54b44e {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1924.851369] env[68282]: DEBUG nova.network.neutron [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Successfully updated port: ec213290-bc57-4b11-be96-72f8cd54b44e {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1924.860951] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "refresh_cache-e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.861240] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "refresh_cache-e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.861521] env[68282]: DEBUG nova.network.neutron [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1924.907058] env[68282]: DEBUG nova.network.neutron [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1925.068141] env[68282]: DEBUG nova.network.neutron [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Updating instance_info_cache with network_info: [{"id": "ec213290-bc57-4b11-be96-72f8cd54b44e", "address": "fa:16:3e:a4:aa:e1", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec213290-bc", "ovs_interfaceid": "ec213290-bc57-4b11-be96-72f8cd54b44e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.083429] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "refresh_cache-e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.083819] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Instance network_info: |[{"id": "ec213290-bc57-4b11-be96-72f8cd54b44e", "address": "fa:16:3e:a4:aa:e1", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec213290-bc", "ovs_interfaceid": "ec213290-bc57-4b11-be96-72f8cd54b44e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1925.084692] env[68282]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:aa:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec213290-bc57-4b11-be96-72f8cd54b44e', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1925.096763] env[68282]: DEBUG oslo.service.loopingcall [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1925.097253] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1925.098713] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1925.099249] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2381db7-e786-419b-a472-8e3d2d8b8437 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.115119] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.115324] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.115493] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.115644] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1925.117079] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49567a13-ba09-4b8d-a29d-39fa92cbea16 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.121165] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1925.121165] env[68282]: value = "task-3470613" [ 1925.121165] env[68282]: _type = 
"Task" [ 1925.121165] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.127122] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878a64f0-b431-4b40-a0cc-cc953e028020 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.133336] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470613, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.144011] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925893d9-66c1-4605-8e78-161a150a9f26 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.149858] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29f908c-031a-4bfc-9388-6c83afac3428 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.178484] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180946MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1925.178684] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.178828] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.255619] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.255801] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.255933] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.256073] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.256264] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.256412] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.256531] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 423ea779-232c-4662-acbd-9d2edec5867b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.256648] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.256762] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.256875] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1925.257073] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1925.257211] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1925.386394] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50892b4c-d6d7-4811-a92d-2a9bf7d68987 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.393673] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f8ba6b-d8c0-4a25-8bcc-cd50987fec77 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.425890] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650b25b8-7f92-4db1-884b-8f390e513b52 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.429344] env[68282]: DEBUG nova.compute.manager [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Received event network-vif-plugged-ec213290-bc57-4b11-be96-72f8cd54b44e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1925.429549] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] Acquiring lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.429754] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] Lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.429920] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] Lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.430098] env[68282]: DEBUG nova.compute.manager [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] No waiting events found dispatching network-vif-plugged-ec213290-bc57-4b11-be96-72f8cd54b44e {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1925.430268] env[68282]: WARNING nova.compute.manager [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Received unexpected event network-vif-plugged-ec213290-bc57-4b11-be96-72f8cd54b44e for instance with vm_state building and task_state spawning. [ 1925.430428] env[68282]: DEBUG nova.compute.manager [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Received event network-changed-ec213290-bc57-4b11-be96-72f8cd54b44e {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1925.430583] env[68282]: DEBUG nova.compute.manager [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Refreshing instance network info cache due to event network-changed-ec213290-bc57-4b11-be96-72f8cd54b44e. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1925.430762] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] Acquiring lock "refresh_cache-e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.430896] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] Acquired lock "refresh_cache-e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.431063] env[68282]: DEBUG nova.network.neutron [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Refreshing network info cache for port ec213290-bc57-4b11-be96-72f8cd54b44e {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1925.437324] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b1e53e-cfa7-454b-9d99-1a27bcea4f6e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.452929] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1925.460707] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1925.474466] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
{{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1925.474652] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.296s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.630607] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470613, 'name': CreateVM_Task, 'duration_secs': 0.284472} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.630875] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1925.631551] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.631721] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.632036] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1925.632281] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f23c61e-664c-4bad-a78b-06ea28f27347 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.636730] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1925.636730] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52b743ba-9774-dd5f-dbd8-73b1970225b8" [ 1925.636730] env[68282]: _type = "Task" [ 1925.636730] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.644132] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52b743ba-9774-dd5f-dbd8-73b1970225b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.689953] env[68282]: DEBUG nova.network.neutron [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Updated VIF entry in instance network info cache for port ec213290-bc57-4b11-be96-72f8cd54b44e. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1925.690356] env[68282]: DEBUG nova.network.neutron [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Updating instance_info_cache with network_info: [{"id": "ec213290-bc57-4b11-be96-72f8cd54b44e", "address": "fa:16:3e:a4:aa:e1", "network": {"id": "e6adfa42-442e-48a7-840e-1591e813c1c5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-333587985-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ea2948d9c0a046a09077c014de41faeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec213290-bc", "ovs_interfaceid": "ec213290-bc57-4b11-be96-72f8cd54b44e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.699486] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ef3c796-f6f7-4c20-9c1d-86387353cf38 req-5bc39b30-e33c-4ff3-a639-527236731745 service nova] Releasing lock "refresh_cache-e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.147232] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.147492] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1926.147712] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.467173] env[68282]: 
DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1927.467549] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1927.467549] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1927.488466] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.488659] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.488802] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.488934] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.489076] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.489254] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.489347] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.489464] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.489589] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.489710] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1927.489835] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1928.088060] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1928.088345] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1931.083553] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1932.087840] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1932.088152] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1932.088978] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1932.088978] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.087391] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.812718] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "14e97724-1044-4f32-ac27-8062120c2c46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.813315] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "14e97724-1044-4f32-ac27-8062120c2c46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.658179] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "41e28779-65ad-476c-bc9c-9747beba2813" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.658597] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "41e28779-65ad-476c-bc9c-9747beba2813" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.109429] env[68282]: WARNING oslo_vmware.rw_handles [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1971.109429] env[68282]: ERROR oslo_vmware.rw_handles [ 1971.110293] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1971.112142] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 
tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1971.112391] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Copying Virtual Disk [datastore2] vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/adce6bef-2cda-4483-9d01-d97df821b2ba/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1971.112690] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9984d6ea-b8d9-4f90-93ba-2f2eb45c105e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.121115] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Waiting for the task: (returnval){ [ 1971.121115] env[68282]: value = "task-3470614" [ 1971.121115] env[68282]: _type = "Task" [ 1971.121115] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.129195] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Task: {'id': task-3470614, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.236352] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.631123] env[68282]: DEBUG oslo_vmware.exceptions [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1971.631413] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1971.631974] env[68282]: ERROR nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1971.631974] env[68282]: Faults: ['InvalidArgument'] [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Traceback (most recent call last): [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] yield resources [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] self.driver.spawn(context, instance, image_meta, [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] self._fetch_image_if_missing(context, vi) [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] image_cache(vi, tmp_image_ds_loc) [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] vm_util.copy_virtual_disk( [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] session._wait_for_task(vmdk_copy_task) [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] return self.wait_for_task(task_ref) [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] return evt.wait() [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] result = hub.switch() [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] return self.greenlet.switch() [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] self.f(*self.args, **self.kw) [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] raise exceptions.translate_fault(task_info.error) [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Faults: ['InvalidArgument'] [ 1971.631974] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] [ 1971.633014] env[68282]: INFO nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Terminating instance [ 1971.633914] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.634138] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1971.634394] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fd007e2-56c4-4ad4-972a-5d23b3d35f29 {{(pid=68282) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.636704] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1971.636905] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1971.637602] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c853f0c5-cbe5-407b-973f-bef2954f6330 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.644445] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1971.644647] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bca4a1e-f7cf-4ee5-82a4-d68b3ed93401 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.646711] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1971.646884] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1971.647839] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a65e533-4e51-4f7e-ba53-54dc8ac12222 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.652703] env[68282]: DEBUG oslo_vmware.api [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Waiting for the task: (returnval){ [ 1971.652703] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]529f90f3-bd7b-756d-db12-6eeb3123f0db" [ 1971.652703] env[68282]: _type = "Task" [ 1971.652703] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.662920] env[68282]: DEBUG oslo_vmware.api [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]529f90f3-bd7b-756d-db12-6eeb3123f0db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.707763] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1971.707985] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1971.708183] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Deleting the datastore file [datastore2] 0e8afd42-0759-41c0-892a-c4f852d5d3e4 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1971.708439] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1045a15e-f923-4be1-952c-7d6d34ca89c8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.715082] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Waiting for the task: (returnval){ [ 1971.715082] env[68282]: value = "task-3470616" [ 1971.715082] env[68282]: _type = "Task" [ 1971.715082] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.722879] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Task: {'id': task-3470616, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.162781] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1972.163072] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Creating directory with path [datastore2] vmware_temp/d12f0432-2ef7-41c2-a3b0-cda79b7b367e/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1972.163307] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c45b2358-315d-4bcc-8dcc-424a3352be7c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.176564] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Created directory with path [datastore2] vmware_temp/d12f0432-2ef7-41c2-a3b0-cda79b7b367e/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1972.176752] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Fetch image to [datastore2] vmware_temp/d12f0432-2ef7-41c2-a3b0-cda79b7b367e/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1972.177032] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/d12f0432-2ef7-41c2-a3b0-cda79b7b367e/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1972.177757] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c39f74-d4a1-4f93-aab7-5bb04c4ee8a9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.185031] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5bf63a-86f6-48fc-834d-e8b50314b3bd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.193397] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800162dc-abd1-412a-8c3a-85817c7f8668 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.227353] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84f43cc-8941-4458-acf2-da69142a2ea8 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.234303] env[68282]: DEBUG oslo_vmware.api [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Task: {'id': task-3470616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072065} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.235736] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1972.235931] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1972.236119] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1972.236299] env[68282]: INFO nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Took 0.60 seconds to destroy the instance on the hypervisor. 
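(Editor's note, not part of the log.) The records above show the recurring vSphere task pattern in this trace: a task such as CopyVirtualDisk_Task or DeleteDatastoreFile_Task is created, oslo.vmware's wait_for_task() polls its TaskInfo ("progress is 0%." ... "completed successfully."), and a failed task is surfaced as a VimFaultException, as with the 'InvalidArgument'/fileType failure earlier. The snippet below is a minimal, illustrative sketch of that pattern only, not Nova's own code; the credentials, paths, and the positional api_retry_count/task_poll_interval arguments are assumptions, and a real CopyVirtualDisk_Task call would also pass sourceDatacenter/destDatacenter references.

```python
# Illustrative sketch of the task-wait pattern visible in the log above.
# Assumptions: placeholder vCenter credentials/paths; VMwareAPISession's
# 4th/5th positional args are api_retry_count and task_poll_interval.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc

session = vmware_api.VMwareAPISession(
    'vc1.example.com', 'admin', 'secret',   # placeholder host and credentials
    10,                                      # api_retry_count (assumed positional)
    0.5)                                     # task_poll_interval in seconds (assumed positional)

disk_mgr = session.vim.service_content.virtualDiskManager
# Kick off a disk copy task; a production call (as in nova.virt.vmwareapi.vm_util)
# would also pass sourceDatacenter/destDatacenter managed object refs.
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
    destName='[datastore2] devstack-image-cache_base/example.vmdk')

try:
    # Polls TaskInfo until done, like the "_poll_task ... progress is 0%." lines above.
    session.wait_for_task(task)
except vmware_exc.VimFaultException as exc:
    # For the failure in this log this would show ['InvalidArgument'] and
    # "A specified parameter was not correct: fileType".
    print(exc.fault_list, exc)
```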
[ 1972.238085] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a3c09aad-f77d-4ce2-a234-e6f91f317331 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.239943] env[68282]: DEBUG nova.compute.claims [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1972.240126] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.240338] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.266172] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1972.381733] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.382532] env[68282]: ERROR nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = getattr(controller, method)(*args, **kwargs) [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._get(image_id) [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] resp, body = self.http_client.get(url, headers=header) [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.request(url, 'GET', **kwargs) [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._handle_response(resp) [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exc.from_response(resp, resp.content) [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] During handling of the above exception, another exception occurred: [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] yield resources [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self.driver.spawn(context, instance, image_meta, [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1972.382532] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._fetch_image_if_missing(context, vi) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image_fetch(context, vi, tmp_image_ds_loc) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] images.fetch_image( [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] metadata = IMAGE_API.get(context, image_ref) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return session.show(context, image_id, [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] _reraise_translated_image_exception(image_id) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise new_exc.with_traceback(exc_trace) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = getattr(controller, method)(*args, **kwargs) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._get(image_id) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] resp, body = self.http_client.get(url, headers=header) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.request(url, 'GET', **kwargs) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._handle_response(resp) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exc.from_response(resp, resp.content) [ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1972.383606] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1972.383606] env[68282]: INFO nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Terminating instance [ 1972.384708] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.385216] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1972.385216] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe610afb-da53-484f-8360-5cba1acc354a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.388386] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1972.388602] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1972.391582] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77932a5-350f-4129-b50c-e00642891a34 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.399140] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1972.399372] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c85e4d7-158f-4462-9e66-9ea4f9e18744 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.401638] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1972.401813] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Folder [datastore2] devstack-image-cache_base 
created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1972.402747] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28f51518-92b1-4042-ae14-583a6fa70eb3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.409676] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 1972.409676] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5214d0b3-d60f-be4a-dfb3-b7038846803b" [ 1972.409676] env[68282]: _type = "Task" [ 1972.409676] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.418854] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5214d0b3-d60f-be4a-dfb3-b7038846803b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.442621] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b11af10-9c2e-46b1-a266-e41e95cfc866 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.449486] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da76b8d6-99ac-418d-9efe-c7ae06e19361 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.480088] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d33d47-a5a8-4e1f-ad62-f3729482af02 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.487910] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9a6e0e-f22f-4da2-8558-b6bcbc0d9a20 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.493620] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1972.493935] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1972.494059] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Deleting the datastore file [datastore2] a9d0de25-ef21-4725-a6c1-f6fac2593bb9 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1972.494568] env[68282]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c5a502c-ba8e-4a52-9a47-f4888cf95595 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.504370] env[68282]: DEBUG nova.compute.provider_tree [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1972.509971] env[68282]: DEBUG oslo_vmware.api [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Waiting for the task: (returnval){ [ 1972.509971] env[68282]: value = "task-3470618" [ 1972.509971] env[68282]: _type = "Task" [ 1972.509971] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.515032] env[68282]: DEBUG nova.scheduler.client.report [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1972.522862] env[68282]: DEBUG oslo_vmware.api [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Task: {'id': task-3470618, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.531033] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.290s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.531033] env[68282]: ERROR nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1972.531033] env[68282]: Faults: ['InvalidArgument'] [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Traceback (most recent call last): [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] self.driver.spawn(context, instance, image_meta, [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] self._fetch_image_if_missing(context, vi) [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] image_cache(vi, tmp_image_ds_loc) [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] vm_util.copy_virtual_disk( [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] session._wait_for_task(vmdk_copy_task) [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] return self.wait_for_task(task_ref) [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] return evt.wait() [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] result = hub.switch() [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] return self.greenlet.switch() [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] self.f(*self.args, **self.kw) [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] raise exceptions.translate_fault(task_info.error) [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Faults: ['InvalidArgument'] [ 1972.531033] env[68282]: ERROR nova.compute.manager [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] [ 1972.532071] env[68282]: DEBUG nova.compute.utils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1972.532678] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Build of instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 was re-scheduled: A specified parameter was not correct: fileType [ 1972.532678] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1972.533058] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1972.533281] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if 
VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1972.533463] env[68282]: DEBUG nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1972.533637] env[68282]: DEBUG nova.network.neutron [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1972.907772] env[68282]: DEBUG nova.network.neutron [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1972.919676] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1972.919961] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating directory with path [datastore2] vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1972.920216] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c9d2c69-f581-4a38-9295-e6037a5f0326 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.923861] env[68282]: INFO nova.compute.manager [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Took 0.39 seconds to deallocate network for instance. 
[ 1972.936321] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created directory with path [datastore2] vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1972.936593] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Fetch image to [datastore2] vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1972.936912] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1972.937581] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c71f87-5a24-40f0-96b0-34223608b261 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.945219] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663fd727-5a30-42ca-9225-1324d0d25794 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.954724] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120ed78e-3664-4e3d-ae5b-2ea620c5228f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.987954] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9734f6f9-f283-4c03-a1c4-69cf055fb1b5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.998178] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f7499ad8-00ac-48ad-b754-19af649e8476 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.019889] env[68282]: DEBUG oslo_vmware.api [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Task: {'id': task-3470618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091612} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.021425] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1973.021626] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1973.021807] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1973.022034] env[68282]: INFO nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1973.023830] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1973.026581] env[68282]: DEBUG nova.compute.claims [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1973.026758] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.026997] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.054514] env[68282]: INFO nova.scheduler.client.report [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Deleted allocations for instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 [ 1973.080321] env[68282]: DEBUG oslo_concurrency.lockutils [None req-754f47b7-1999-4392-b08f-22acf66c5bb4 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" "released" 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.460s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.081453] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.438s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.081680] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Acquiring lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.081884] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.082117] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.084270] env[68282]: DEBUG oslo_vmware.rw_handles [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1973.086123] env[68282]: INFO nova.compute.manager [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Terminating instance [ 1973.090302] env[68282]: DEBUG nova.compute.manager [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1973.090502] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1973.092014] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b74a454-2858-48e8-a535-d4f4b19a769b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.152205] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1973.160939] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e83c4e2-7f3a-4ad7-9ba3-afbd531bce41 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.175091] env[68282]: DEBUG oslo_vmware.rw_handles [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1973.175535] env[68282]: DEBUG oslo_vmware.rw_handles [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1973.194821] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0e8afd42-0759-41c0-892a-c4f852d5d3e4 could not be found. [ 1973.195056] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1973.195226] env[68282]: INFO nova.compute.manager [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Took 0.10 seconds to destroy the instance on the hypervisor. 
[ 1973.195472] env[68282]: DEBUG oslo.service.loopingcall [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1973.200236] env[68282]: DEBUG nova.compute.manager [-] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1973.200357] env[68282]: DEBUG nova.network.neutron [-] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1973.213820] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.298862] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76dc595f-e92a-46ad-8f00-c89dafc135f5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.306718] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202abb2d-bf06-459c-ac5b-3adf53b26ed5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.339784] env[68282]: DEBUG nova.network.neutron [-] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.341336] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc549f9-98fe-4f9e-ac6b-50660bc03063 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.348921] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfef058b-3071-4b1b-b8cc-8553e3f31928 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.353302] env[68282]: INFO nova.compute.manager [-] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] Took 0.15 seconds to deallocate network for instance. 
[ 1973.365363] env[68282]: DEBUG nova.compute.provider_tree [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1973.373982] env[68282]: DEBUG nova.scheduler.client.report [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1973.401560] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.374s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.402077] env[68282]: ERROR nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = getattr(controller, method)(*args, **kwargs) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._get(image_id) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] resp, body = self.http_client.get(url, headers=header) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.request(url, 'GET', **kwargs) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._handle_response(resp) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exc.from_response(resp, resp.content) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] During handling of the above exception, another exception occurred: [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self.driver.spawn(context, instance, image_meta, [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._fetch_image_if_missing(context, vi) [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1973.402077] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image_fetch(context, vi, tmp_image_ds_loc) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] images.fetch_image( [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] metadata = IMAGE_API.get(context, image_ref) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return session.show(context, image_id, [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] _reraise_translated_image_exception(image_id) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise new_exc.with_traceback(exc_trace) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: 
a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = getattr(controller, method)(*args, **kwargs) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._get(image_id) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] resp, body = self.http_client.get(url, headers=header) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.request(url, 'GET', **kwargs) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._handle_response(resp) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exc.from_response(resp, resp.content) [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. [ 1973.402944] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.402944] env[68282]: DEBUG nova.compute.utils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
{{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1973.404115] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.190s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.405543] env[68282]: INFO nova.compute.claims [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1973.409777] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Build of instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 was re-scheduled: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1973.410553] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1973.410824] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1973.411111] env[68282]: DEBUG nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1973.411586] env[68282]: DEBUG nova.network.neutron [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1973.474349] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ea032ab3-432b-415f-823d-66d2e6abadb6 tempest-ListServerFiltersTestJSON-1177380021 tempest-ListServerFiltersTestJSON-1177380021-project-member] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.393s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.475219] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 79.874s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.475409] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 0e8afd42-0759-41c0-892a-c4f852d5d3e4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1973.475584] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "0e8afd42-0759-41c0-892a-c4f852d5d3e4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.522590] env[68282]: DEBUG neutronclient.v2_0.client [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68282) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1973.523877] env[68282]: ERROR nova.compute.manager [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = getattr(controller, method)(*args, **kwargs) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._get(image_id) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] resp, body = self.http_client.get(url, headers=header) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.request(url, 'GET', **kwargs) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._handle_response(resp) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exc.from_response(resp, resp.content) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] During handling of the above exception, another exception occurred: [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self.driver.spawn(context, instance, image_meta, [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._fetch_image_if_missing(context, vi) [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1973.523877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image_fetch(context, vi, tmp_image_ds_loc) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] images.fetch_image( [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] metadata = IMAGE_API.get(context, image_ref) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return session.show(context, image_id, [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] _reraise_translated_image_exception(image_id) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise new_exc.with_traceback(exc_trace) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: 
a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = getattr(controller, method)(*args, **kwargs) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._get(image_id) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] resp, body = self.http_client.get(url, headers=header) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.request(url, 'GET', **kwargs) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self._handle_response(resp) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exc.from_response(resp, resp.content) [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] nova.exception.ImageNotAuthorized: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. 
[ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] During handling of the above exception, another exception occurred: [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._build_and_run_instance(context, instance, image, [ 1973.524877] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exception.RescheduledException( [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] nova.exception.RescheduledException: Build of instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 was re-scheduled: Not authorized for image 658717f1-7b98-47ed-bf66-8ef1a68a7047. [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] During handling of the above exception, another exception occurred: [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] exception_handler_v20(status_code, error_body) [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise client_exc(message=error_message, [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Neutron server returns request_ids: ['req-39072520-555d-4968-80fe-ba9ca0ff9f3c'] [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: 
a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] During handling of the above exception, another exception occurred: [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._deallocate_network(context, instance, requested_networks) [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self.network_api.deallocate_for_instance( [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] data = neutron.list_ports(**search_opts) [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.list('ports', self.ports_path, retrieve_all, [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] for r in self._pagination(collection, path, **params): [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1973.525969] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] res = self.get(path, params=params) [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: 
a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.retry_request("GET", action, body=body, [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.do_request(method, action, body=body, [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._handle_fault_response(status_code, replybody, resp) [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exception.Unauthorized() [ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] nova.exception.Unauthorized: Not authorized. 
[ 1973.527703] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.581464] env[68282]: INFO nova.scheduler.client.report [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Deleted allocations for instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 [ 1973.602514] env[68282]: DEBUG oslo_concurrency.lockutils [None req-0beff682-3662-43fe-b1c3-79665b66a629 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 584.650s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.603600] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 388.689s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.603824] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Acquiring lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.604036] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.604213] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.606262] env[68282]: INFO nova.compute.manager [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Terminating instance [ 1973.608137] env[68282]: DEBUG nova.compute.manager [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1973.608332] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1973.609226] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cd250c4d-3f0e-4d80-bef5-6a16c0d2cf34 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.613211] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64696b37-1014-41af-a175-b65d24858842 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.621757] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35af1b1b-25fa-4448-9fb6-60f834fe018e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.629249] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd14e2b9-1acf-4468-b158-065208645c90 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.640874] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1973.678664] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55806ba-9a6a-4eab-8b59-75d2b1b0b283 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.681734] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a9d0de25-ef21-4725-a6c1-f6fac2593bb9 could not be found. [ 1973.681939] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1973.682133] env[68282]: INFO nova.compute.manager [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Took 0.07 seconds to destroy the instance on the hypervisor. [ 1973.682458] env[68282]: DEBUG oslo.service.loopingcall [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1973.683244] env[68282]: DEBUG nova.compute.manager [-] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1973.683347] env[68282]: DEBUG nova.network.neutron [-] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1973.692981] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d24c37a-d816-47b2-9434-ba890e4a248d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.707685] env[68282]: DEBUG nova.compute.provider_tree [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1973.709668] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.716638] env[68282]: DEBUG nova.scheduler.client.report [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1973.731046] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.731728] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1973.735738] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.027s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.737108] env[68282]: INFO nova.compute.claims [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1973.763723] env[68282]: DEBUG nova.compute.utils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1973.764966] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1973.765163] env[68282]: DEBUG nova.network.neutron [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1973.778114] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1973.781461] env[68282]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68282) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1973.781682] env[68282]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1973.782221] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-c3857fc6-8dac-4aa1-bc0c-b93911e62f73'] [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1973.782221] env[68282]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1973.782221] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1973.783501] env[68282]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1973.783501] env[68282]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1973.783501] env[68282]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1973.783501] env[68282]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1973.783501] env[68282]: ERROR oslo.service.loopingcall [ 1973.783501] env[68282]: ERROR nova.compute.manager [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1973.831348] env[68282]: ERROR nova.compute.manager [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] exception_handler_v20(status_code, error_body) [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise client_exc(message=error_message, [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Neutron server returns request_ids: ['req-c3857fc6-8dac-4aa1-bc0c-b93911e62f73'] [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] During handling of the above exception, another exception occurred: [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Traceback (most recent call last): [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._delete_instance(context, instance, bdms) [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._shutdown_instance(context, instance, bdms) [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._try_deallocate_network(context, instance, requested_networks) [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] with excutils.save_and_reraise_exception(): [ 1973.831348] env[68282]: ERROR 
nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self.force_reraise() [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise self.value [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] _deallocate_network_with_retries() [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return evt.wait() [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1973.831348] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = hub.switch() [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.greenlet.switch() [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = func(*self.args, **self.kw) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] result = f(*args, **kwargs) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._deallocate_network( [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self.network_api.deallocate_for_instance( [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: 
a9d0de25-ef21-4725-a6c1-f6fac2593bb9] data = neutron.list_ports(**search_opts) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.list('ports', self.ports_path, retrieve_all, [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] for r in self._pagination(collection, path, **params): [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] res = self.get(path, params=params) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.retry_request("GET", action, body=body, [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] return self.do_request(method, action, body=body, [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] ret = obj(*args, **kwargs) [ 1973.832552] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1973.833818] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] self._handle_fault_response(status_code, replybody, resp) [ 1973.833818] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1973.833818] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1973.833818] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1973.833818] env[68282]: ERROR nova.compute.manager [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] [ 1973.838400] env[68282]: DEBUG nova.policy [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a603a37bebe4a1c9b8f5d3b1cf5f34f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0695dbb014ba4c359215dbb84bb1d314', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1973.854905] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Start spawning the instance on the hypervisor. {{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1973.861294] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.257s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.862149] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 80.261s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.862366] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1973.862553] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "a9d0de25-ef21-4725-a6c1-f6fac2593bb9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.881348] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1973.881917] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1973.881917] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1973.881917] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1973.882065] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1973.882158] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1973.882374] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1973.882543] env[68282]: DEBUG nova.virt.hardware [None 
req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1973.882713] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1973.882877] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1973.883075] env[68282]: DEBUG nova.virt.hardware [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1973.883956] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ffd8ab-bb3b-4db6-9883-13c82bd6fa10 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.897203] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecd5511-2a6a-4882-a057-77ae20cf36e8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.918674] env[68282]: INFO nova.compute.manager [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] [instance: a9d0de25-ef21-4725-a6c1-f6fac2593bb9] Successfully reverted task state from None on failure for instance. [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server [None req-a7acff01-0c13-46f2-bb45-4847067ab2a5 tempest-ServersTestMultiNic-851115543 tempest-ServersTestMultiNic-851115543-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-c3857fc6-8dac-4aa1-bc0c-b93911e62f73'] [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1973.921284] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server raise self.value [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.922599] env[68282]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1973.922599] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1973.923950] env[68282]: ERROR oslo_messaging.rpc.server [ 1973.939009] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7405524b-d742-4307-b67d-734d5c14bd25 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.946333] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edff25f1-52e2-44be-bee6-2b8abf7cd075 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.977113] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b605437-51ae-462a-8161-afc213ef67bc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.984043] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735cde9d-3b2f-4517-b47a-ea75ef77e1c1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.996911] env[68282]: DEBUG nova.compute.provider_tree [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1974.004743] env[68282]: DEBUG nova.scheduler.client.report [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1974.019500] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.284s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.019967] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Start building networks asynchronously for instance. 
{{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1974.057627] env[68282]: DEBUG nova.compute.utils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1974.058978] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1974.059745] env[68282]: DEBUG nova.network.neutron [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1974.069015] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1974.118056] env[68282]: DEBUG nova.policy [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '586f6880a99449eeab1379280df867a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12975c11434b4530b1f38c1eceaa4e68', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 1974.141378] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1974.171194] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1974.172681] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1974.172681] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1974.172681] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1974.172681] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1974.172681] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1974.172681] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1974.172681] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1974.172681] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 
tempest-ServersTestJSON-115846852-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1974.173104] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1974.173104] env[68282]: DEBUG nova.virt.hardware [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1974.174230] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6d7547-2d4a-4c08-ae3d-2d8e3f8dbb1e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.182620] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b89da8-058e-4ab9-8041-7938c5251da7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.217615] env[68282]: DEBUG nova.network.neutron [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Successfully created port: ac3b6a07-cbb4-4b8e-bee9-453387f4101f {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1974.447255] env[68282]: DEBUG nova.network.neutron [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Successfully created port: 22dd036c-b21e-476f-acee-e81b41927b26 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1975.035963] env[68282]: DEBUG nova.network.neutron [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Successfully updated port: 22dd036c-b21e-476f-acee-e81b41927b26 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1975.051644] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "refresh_cache-41e28779-65ad-476c-bc9c-9747beba2813" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.051644] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired lock "refresh_cache-41e28779-65ad-476c-bc9c-9747beba2813" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.051644] env[68282]: DEBUG nova.network.neutron [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Building network info cache for instance {{(pid=68282) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1975.092198] env[68282]: DEBUG nova.network.neutron [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1975.152261] env[68282]: DEBUG nova.compute.manager [req-104d29ad-5b50-447d-bc96-67d9befc29c3 req-555801d4-40ed-49ee-a9b2-7599762bc5a4 service nova] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Received event network-vif-plugged-22dd036c-b21e-476f-acee-e81b41927b26 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1975.152261] env[68282]: DEBUG oslo_concurrency.lockutils [req-104d29ad-5b50-447d-bc96-67d9befc29c3 req-555801d4-40ed-49ee-a9b2-7599762bc5a4 service nova] Acquiring lock "41e28779-65ad-476c-bc9c-9747beba2813-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.152261] env[68282]: DEBUG oslo_concurrency.lockutils [req-104d29ad-5b50-447d-bc96-67d9befc29c3 req-555801d4-40ed-49ee-a9b2-7599762bc5a4 service nova] Lock "41e28779-65ad-476c-bc9c-9747beba2813-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.152261] env[68282]: DEBUG oslo_concurrency.lockutils [req-104d29ad-5b50-447d-bc96-67d9befc29c3 req-555801d4-40ed-49ee-a9b2-7599762bc5a4 service nova] Lock "41e28779-65ad-476c-bc9c-9747beba2813-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.152261] env[68282]: DEBUG nova.compute.manager [req-104d29ad-5b50-447d-bc96-67d9befc29c3 req-555801d4-40ed-49ee-a9b2-7599762bc5a4 service nova] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] No waiting events found dispatching network-vif-plugged-22dd036c-b21e-476f-acee-e81b41927b26 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1975.152261] env[68282]: WARNING nova.compute.manager [req-104d29ad-5b50-447d-bc96-67d9befc29c3 req-555801d4-40ed-49ee-a9b2-7599762bc5a4 service nova] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Received unexpected event network-vif-plugged-22dd036c-b21e-476f-acee-e81b41927b26 for instance with vm_state building and task_state spawning. 
[ 1975.198676] env[68282]: DEBUG nova.compute.manager [req-4d4c9a28-120b-4da8-bfc4-40026f8498d5 req-8b25b283-057f-4e46-b14e-26905c95987c service nova] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Received event network-vif-plugged-ac3b6a07-cbb4-4b8e-bee9-453387f4101f {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1975.199263] env[68282]: DEBUG oslo_concurrency.lockutils [req-4d4c9a28-120b-4da8-bfc4-40026f8498d5 req-8b25b283-057f-4e46-b14e-26905c95987c service nova] Acquiring lock "14e97724-1044-4f32-ac27-8062120c2c46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.199593] env[68282]: DEBUG oslo_concurrency.lockutils [req-4d4c9a28-120b-4da8-bfc4-40026f8498d5 req-8b25b283-057f-4e46-b14e-26905c95987c service nova] Lock "14e97724-1044-4f32-ac27-8062120c2c46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.199812] env[68282]: DEBUG oslo_concurrency.lockutils [req-4d4c9a28-120b-4da8-bfc4-40026f8498d5 req-8b25b283-057f-4e46-b14e-26905c95987c service nova] Lock "14e97724-1044-4f32-ac27-8062120c2c46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.200421] env[68282]: DEBUG nova.compute.manager [req-4d4c9a28-120b-4da8-bfc4-40026f8498d5 req-8b25b283-057f-4e46-b14e-26905c95987c service nova] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] No waiting events found dispatching network-vif-plugged-ac3b6a07-cbb4-4b8e-bee9-453387f4101f {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1975.200689] env[68282]: WARNING nova.compute.manager [req-4d4c9a28-120b-4da8-bfc4-40026f8498d5 req-8b25b283-057f-4e46-b14e-26905c95987c service nova] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Received unexpected event network-vif-plugged-ac3b6a07-cbb4-4b8e-bee9-453387f4101f for instance with vm_state building and task_state spawning. 
[ 1975.268578] env[68282]: DEBUG nova.network.neutron [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Updating instance_info_cache with network_info: [{"id": "22dd036c-b21e-476f-acee-e81b41927b26", "address": "fa:16:3e:ad:bc:8b", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22dd036c-b2", "ovs_interfaceid": "22dd036c-b21e-476f-acee-e81b41927b26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.284373] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Releasing lock "refresh_cache-41e28779-65ad-476c-bc9c-9747beba2813" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.284976] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Instance network_info: |[{"id": "22dd036c-b21e-476f-acee-e81b41927b26", "address": "fa:16:3e:ad:bc:8b", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22dd036c-b2", "ovs_interfaceid": "22dd036c-b21e-476f-acee-e81b41927b26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1975.285669] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b 
tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:bc:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a485857d-7086-4dcf-9d65-d0dcd177fcb0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22dd036c-b21e-476f-acee-e81b41927b26', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1975.296278] env[68282]: DEBUG oslo.service.loopingcall [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1975.296278] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1975.296278] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-561f8972-f0ab-4287-822f-99e42ba55ac7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.319160] env[68282]: DEBUG nova.network.neutron [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Successfully updated port: ac3b6a07-cbb4-4b8e-bee9-453387f4101f {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1975.321405] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1975.321405] env[68282]: value = "task-3470619" [ 1975.321405] env[68282]: _type = "Task" [ 1975.321405] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.332174] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470619, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.333451] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "refresh_cache-14e97724-1044-4f32-ac27-8062120c2c46" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.333631] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired lock "refresh_cache-14e97724-1044-4f32-ac27-8062120c2c46" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.333813] env[68282]: DEBUG nova.network.neutron [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1975.420195] env[68282]: DEBUG nova.network.neutron [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1975.699509] env[68282]: DEBUG nova.network.neutron [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Updating instance_info_cache with network_info: [{"id": "ac3b6a07-cbb4-4b8e-bee9-453387f4101f", "address": "fa:16:3e:20:eb:0b", "network": {"id": "a4a3552a-50b6-4f21-8281-1a40d7f4da04", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-640716636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0695dbb014ba4c359215dbb84bb1d314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac3b6a07-cb", "ovs_interfaceid": "ac3b6a07-cbb4-4b8e-bee9-453387f4101f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.713796] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Releasing lock "refresh_cache-14e97724-1044-4f32-ac27-8062120c2c46" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1975.714372] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Instance network_info: |[{"id": "ac3b6a07-cbb4-4b8e-bee9-453387f4101f", "address": "fa:16:3e:20:eb:0b", "network": {"id": "a4a3552a-50b6-4f21-8281-1a40d7f4da04", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-640716636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0695dbb014ba4c359215dbb84bb1d314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac3b6a07-cb", "ovs_interfaceid": "ac3b6a07-cbb4-4b8e-bee9-453387f4101f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1975.715220] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:eb:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac3b6a07-cbb4-4b8e-bee9-453387f4101f', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1975.722981] env[68282]: DEBUG oslo.service.loopingcall [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1975.727012] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1975.727012] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c2de6d7-6deb-4523-b2d3-24fd93aa8f7d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.744683] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1975.744683] env[68282]: value = "task-3470620" [ 1975.744683] env[68282]: _type = "Task" [ 1975.744683] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.752810] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470620, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.831890] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470619, 'name': CreateVM_Task, 'duration_secs': 0.299761} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.831890] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1975.838839] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.839015] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.839365] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1975.839631] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ded84843-b6de-4e55-8688-978e7097421a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.843889] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 1975.843889] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52299947-e2e1-ab34-745e-31e7fe02e7a2" [ 1975.843889] env[68282]: _type = "Task" [ 1975.843889] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.851045] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52299947-e2e1-ab34-745e-31e7fe02e7a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.255667] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470620, 'name': CreateVM_Task, 'duration_secs': 0.292009} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.255954] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1976.256484] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.353956] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.354556] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1976.354791] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.355015] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.355326] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1976.355573] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f88f2fc3-ab0f-494b-8900-c12f913088de {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.360097] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for the task: (returnval){ [ 1976.360097] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5246bad8-d8ab-e6f0-9250-2011e86d5b5c" [ 1976.360097] env[68282]: _type = "Task" [ 1976.360097] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.366929] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5246bad8-d8ab-e6f0-9250-2011e86d5b5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.870204] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.870474] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1976.870695] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.174650] env[68282]: DEBUG nova.compute.manager [req-a97b56f2-fa3f-4db7-8134-cd9797a79dc2 req-651345cd-5eb5-45b3-9314-194077c1b8b0 service nova] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Received event network-changed-22dd036c-b21e-476f-acee-e81b41927b26 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1977.174650] env[68282]: DEBUG nova.compute.manager [req-a97b56f2-fa3f-4db7-8134-cd9797a79dc2 req-651345cd-5eb5-45b3-9314-194077c1b8b0 service nova] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Refreshing instance network info cache due to event network-changed-22dd036c-b21e-476f-acee-e81b41927b26. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1977.174650] env[68282]: DEBUG oslo_concurrency.lockutils [req-a97b56f2-fa3f-4db7-8134-cd9797a79dc2 req-651345cd-5eb5-45b3-9314-194077c1b8b0 service nova] Acquiring lock "refresh_cache-41e28779-65ad-476c-bc9c-9747beba2813" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.174650] env[68282]: DEBUG oslo_concurrency.lockutils [req-a97b56f2-fa3f-4db7-8134-cd9797a79dc2 req-651345cd-5eb5-45b3-9314-194077c1b8b0 service nova] Acquired lock "refresh_cache-41e28779-65ad-476c-bc9c-9747beba2813" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.174872] env[68282]: DEBUG nova.network.neutron [req-a97b56f2-fa3f-4db7-8134-cd9797a79dc2 req-651345cd-5eb5-45b3-9314-194077c1b8b0 service nova] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Refreshing network info cache for port 22dd036c-b21e-476f-acee-e81b41927b26 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1977.228187] env[68282]: DEBUG nova.compute.manager [req-06f1c70f-811c-425a-aa0f-b08f7a385267 req-5c15d2cc-cedf-4cc8-9a28-0646cb7c748f service nova] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Received event network-changed-ac3b6a07-cbb4-4b8e-bee9-453387f4101f {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 1977.228281] env[68282]: DEBUG nova.compute.manager [req-06f1c70f-811c-425a-aa0f-b08f7a385267 req-5c15d2cc-cedf-4cc8-9a28-0646cb7c748f service nova] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Refreshing instance network info cache due to event network-changed-ac3b6a07-cbb4-4b8e-bee9-453387f4101f. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 1977.228479] env[68282]: DEBUG oslo_concurrency.lockutils [req-06f1c70f-811c-425a-aa0f-b08f7a385267 req-5c15d2cc-cedf-4cc8-9a28-0646cb7c748f service nova] Acquiring lock "refresh_cache-14e97724-1044-4f32-ac27-8062120c2c46" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.228654] env[68282]: DEBUG oslo_concurrency.lockutils [req-06f1c70f-811c-425a-aa0f-b08f7a385267 req-5c15d2cc-cedf-4cc8-9a28-0646cb7c748f service nova] Acquired lock "refresh_cache-14e97724-1044-4f32-ac27-8062120c2c46" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.228825] env[68282]: DEBUG nova.network.neutron [req-06f1c70f-811c-425a-aa0f-b08f7a385267 req-5c15d2cc-cedf-4cc8-9a28-0646cb7c748f service nova] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Refreshing network info cache for port ac3b6a07-cbb4-4b8e-bee9-453387f4101f {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1977.493515] env[68282]: DEBUG nova.network.neutron [req-06f1c70f-811c-425a-aa0f-b08f7a385267 req-5c15d2cc-cedf-4cc8-9a28-0646cb7c748f service nova] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Updated VIF entry in instance network info cache for port ac3b6a07-cbb4-4b8e-bee9-453387f4101f. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1977.493864] env[68282]: DEBUG nova.network.neutron [req-06f1c70f-811c-425a-aa0f-b08f7a385267 req-5c15d2cc-cedf-4cc8-9a28-0646cb7c748f service nova] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Updating instance_info_cache with network_info: [{"id": "ac3b6a07-cbb4-4b8e-bee9-453387f4101f", "address": "fa:16:3e:20:eb:0b", "network": {"id": "a4a3552a-50b6-4f21-8281-1a40d7f4da04", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-640716636-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0695dbb014ba4c359215dbb84bb1d314", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9f430b-e6f7-4a47-abd0-3cc7bef3e97c", "external-id": "nsx-vlan-transportzone-977", "segmentation_id": 977, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac3b6a07-cb", "ovs_interfaceid": "ac3b6a07-cbb4-4b8e-bee9-453387f4101f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.503437] env[68282]: DEBUG oslo_concurrency.lockutils [req-06f1c70f-811c-425a-aa0f-b08f7a385267 req-5c15d2cc-cedf-4cc8-9a28-0646cb7c748f service nova] Releasing lock "refresh_cache-14e97724-1044-4f32-ac27-8062120c2c46" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.532052] env[68282]: DEBUG nova.network.neutron [req-a97b56f2-fa3f-4db7-8134-cd9797a79dc2 req-651345cd-5eb5-45b3-9314-194077c1b8b0 service nova] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Updated VIF entry in instance network info cache for port 22dd036c-b21e-476f-acee-e81b41927b26. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1977.532415] env[68282]: DEBUG nova.network.neutron [req-a97b56f2-fa3f-4db7-8134-cd9797a79dc2 req-651345cd-5eb5-45b3-9314-194077c1b8b0 service nova] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Updating instance_info_cache with network_info: [{"id": "22dd036c-b21e-476f-acee-e81b41927b26", "address": "fa:16:3e:ad:bc:8b", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22dd036c-b2", "ovs_interfaceid": "22dd036c-b21e-476f-acee-e81b41927b26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1977.541604] env[68282]: DEBUG oslo_concurrency.lockutils [req-a97b56f2-fa3f-4db7-8134-cd9797a79dc2 req-651345cd-5eb5-45b3-9314-194077c1b8b0 service nova] Releasing lock "refresh_cache-41e28779-65ad-476c-bc9c-9747beba2813" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.161777] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.087506] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1986.100235] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.100454] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.100625] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.100783] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1986.101906] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8007ad49-a149-4c28-af07-db7ae042c8f6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.110518] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a5fa5c-02ed-4be5-b46b-785f25d14858 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.124967] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253eed5d-ec2e-4d18-a55c-72772b4fd511 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.130973] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5e2cbd-dfae-4c6d-9633-cc57c140550d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.160567] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180948MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1986.160705] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.160891] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.231978] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.232165] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.232297] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.232421] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.232543] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 423ea779-232c-4662-acbd-9d2edec5867b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.232662] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.232780] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.232895] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.233019] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 14e97724-1044-4f32-ac27-8062120c2c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.233142] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 41e28779-65ad-476c-bc9c-9747beba2813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1986.233326] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1986.233462] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1986.344867] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a28fc61-5a61-4f32-b826-8b9470240d5b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.352435] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21b00da-a43f-4784-a9aa-ed4bfb1996eb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.381140] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cd258d-feab-419a-82a5-a3ef5247a324 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.387696] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1823b419-e619-4108-895e-a7bf92f50b2b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.400294] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1986.408173] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1986.422016] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1986.422212] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.261s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.422650] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1987.423026] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 1987.423026] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 1987.443359] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.443513] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.443656] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.443790] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.443917] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.444050] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.444217] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.444296] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.444415] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.444533] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 1987.444653] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 1990.087521] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1990.087964] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 1991.083463] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1992.088617] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1992.088910] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.083530] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.087176] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.087360] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.087566] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.642425] env[68282]: WARNING oslo_vmware.rw_handles [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Error occurred while reading the HTTP response.: 
http.client.RemoteDisconnected: Remote end closed connection without response [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2018.642425] env[68282]: ERROR oslo_vmware.rw_handles [ 2018.643223] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2018.645270] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2018.645523] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Copying Virtual Disk [datastore2] vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/bfa42f29-ed24-42ad-9aa0-3bec87d28258/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2018.645845] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5f1c6cd-2a23-407f-81e4-1c81c4b2d2d2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.654606] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 2018.654606] env[68282]: value = "task-3470621" [ 2018.654606] env[68282]: _type = "Task" [ 2018.654606] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.663193] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470621, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.164443] env[68282]: DEBUG oslo_vmware.exceptions [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2019.164790] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.165331] env[68282]: ERROR nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2019.165331] env[68282]: Faults: ['InvalidArgument'] [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Traceback (most recent call last): [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] yield resources [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] self.driver.spawn(context, instance, image_meta, [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] self._fetch_image_if_missing(context, vi) [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] image_cache(vi, tmp_image_ds_loc) [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 
537, in _cache_sparse_image [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] vm_util.copy_virtual_disk( [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] session._wait_for_task(vmdk_copy_task) [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] return self.wait_for_task(task_ref) [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] return evt.wait() [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] result = hub.switch() [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] return self.greenlet.switch() [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] self.f(*self.args, **self.kw) [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] raise exceptions.translate_fault(task_info.error) [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Faults: ['InvalidArgument'] [ 2019.165331] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] [ 2019.167260] env[68282]: INFO nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Terminating instance [ 2019.167260] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.168024] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2019.168024] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fe05b4f-c301-4105-9540-f5a685282a9c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.170267] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2019.170458] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2019.171259] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d114bcf1-e633-477d-942a-ce602efc3e70 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.178052] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2019.178320] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f4156ae-6441-4707-bdc5-1eade81a255f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.180684] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2019.180861] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2019.181893] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50935f25-7240-4b7c-9596-988eae5a6405 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.187355] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for the task: (returnval){ [ 2019.187355] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52e42e60-3e6b-30d3-12ea-fb036da3408d" [ 2019.187355] env[68282]: _type = "Task" [ 2019.187355] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.194748] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52e42e60-3e6b-30d3-12ea-fb036da3408d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.249677] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2019.249953] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2019.250181] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleting the datastore file [datastore2] 7bc5117e-58d1-4c08-b778-7045b1076b94 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2019.250478] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-685720f2-a0c2-4e80-bada-acf8c129274e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.256792] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 2019.256792] env[68282]: value = "task-3470623" [ 2019.256792] env[68282]: _type = "Task" [ 2019.256792] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.265181] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470623, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.698221] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2019.698527] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Creating directory with path [datastore2] vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2019.698666] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1b56ce0-9035-4812-94c4-09158fbcee89 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.710125] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Created directory with path [datastore2] vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2019.710318] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Fetch image to [datastore2] vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2019.710491] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2019.711252] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4763481f-9d97-4dc5-8ae7-da5761cf1ed8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.717589] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0084f47-db2f-4e67-a3fd-ddd8ba75955e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.726103] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8ae70d-8c4e-4bb8-813a-63821fcea9ea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.756878] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb2792e-279b-439b-8187-0527f3c1d955 
{{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.767438] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-44a850b4-a5fa-4d58-95b9-1a1579bd96ff {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.769096] env[68282]: DEBUG oslo_vmware.api [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084957} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.769338] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2019.769520] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2019.769897] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2019.769897] env[68282]: INFO nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2019.772425] env[68282]: DEBUG nova.compute.claims [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2019.772425] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.772537] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.791567] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2019.846905] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2019.908158] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2019.908369] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2020.000253] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d531e9aa-6842-426a-b7c9-89c5617af677 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.008170] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4786f5-8e76-4c25-bc9d-07b71c522670 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.037441] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff11af5-9169-4e58-bb15-d50899cee9ac {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.044263] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4359a48e-ebac-4d77-b76b-51939d7d54df {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.057710] env[68282]: DEBUG nova.compute.provider_tree [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2020.066254] env[68282]: DEBUG nova.scheduler.client.report [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2020.082064] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.309s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.082690] env[68282]: ERROR nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2020.082690] env[68282]: Faults: ['InvalidArgument'] [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Traceback (most recent call last): [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] 
self.driver.spawn(context, instance, image_meta, [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] self._fetch_image_if_missing(context, vi) [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] image_cache(vi, tmp_image_ds_loc) [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] vm_util.copy_virtual_disk( [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] session._wait_for_task(vmdk_copy_task) [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] return self.wait_for_task(task_ref) [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] return evt.wait() [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] result = hub.switch() [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] return self.greenlet.switch() [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] self.f(*self.args, **self.kw) [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] raise exceptions.translate_fault(task_info.error) [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Faults: ['InvalidArgument'] [ 2020.082690] env[68282]: ERROR nova.compute.manager [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] [ 2020.083836] env[68282]: DEBUG nova.compute.utils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2020.084865] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Build of instance 7bc5117e-58d1-4c08-b778-7045b1076b94 was re-scheduled: A specified parameter was not correct: fileType [ 2020.084865] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2020.085244] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2020.085421] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2020.085595] env[68282]: DEBUG nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2020.085772] env[68282]: DEBUG nova.network.neutron [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2020.341239] env[68282]: DEBUG nova.network.neutron [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.353373] env[68282]: INFO nova.compute.manager [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Took 0.27 seconds to deallocate network for instance. 
[ 2020.441562] env[68282]: INFO nova.scheduler.client.report [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleted allocations for instance 7bc5117e-58d1-4c08-b778-7045b1076b94 [ 2020.462447] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b3e6f142-1fa1-411b-996f-0dec9c772cbd tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 500.018s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.462724] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 303.292s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.462963] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "7bc5117e-58d1-4c08-b778-7045b1076b94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.463219] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.463396] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.465410] env[68282]: INFO nova.compute.manager [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Terminating instance [ 2020.467034] env[68282]: DEBUG nova.compute.manager [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2020.467219] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2020.467832] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d420d8e1-9b19-4adc-970c-d611157a8779 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.477430] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4b9220-2589-495b-a9d0-92197b0e1ceb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.507900] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7bc5117e-58d1-4c08-b778-7045b1076b94 could not be found. [ 2020.508124] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2020.508313] env[68282]: INFO nova.compute.manager [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2020.508557] env[68282]: DEBUG oslo.service.loopingcall [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2020.508766] env[68282]: DEBUG nova.compute.manager [-] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2020.508895] env[68282]: DEBUG nova.network.neutron [-] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2020.534281] env[68282]: DEBUG nova.network.neutron [-] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.542468] env[68282]: INFO nova.compute.manager [-] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] Took 0.03 seconds to deallocate network for instance. 
[ 2020.632090] env[68282]: DEBUG oslo_concurrency.lockutils [None req-1b818d68-cbe1-4f44-b939-dcebf16a6ffe tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.169s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.632883] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 127.031s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.633089] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 7bc5117e-58d1-4c08-b778-7045b1076b94] During sync_power_state the instance has a pending task (deleting). Skip. [ 2020.633271] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "7bc5117e-58d1-4c08-b778-7045b1076b94" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.087430] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.101536] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2048.101758] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2048.101926] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.102247] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2048.103258] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ad16fd-83c2-46e9-801a-25c3e153839d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.112137] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dbf0bf99-1cf7-4dc0-9254-883a1d947522 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.125884] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17b859f-444e-4a61-924b-204bd2d0bf9b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.131771] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580207be-f094-4745-aea4-6a6dc3468ecb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.160595] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180927MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2048.160849] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2048.160961] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2048.228716] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.228876] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.229015] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.229195] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 423ea779-232c-4662-acbd-9d2edec5867b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.229334] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.229458] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.229577] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.229692] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 14e97724-1044-4f32-ac27-8062120c2c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.229804] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 41e28779-65ad-476c-bc9c-9747beba2813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2048.229982] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2048.230134] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2048.330827] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f071d387-ade8-4833-a18b-f4b03bb9bd33 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.338400] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cfb90d-8ea2-4df5-83ec-5d87e9f75ed3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.368954] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e156419-6f02-4caf-a99f-c51ff76e2bc8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.375615] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8db053-6d81-4711-88b0-4f8f022606a6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.388194] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2048.396728] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2048.411174] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2048.411374] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.250s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.411983] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2049.412523] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2049.412523] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2049.431623] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.431854] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.432076] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.432296] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.432504] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.432708] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.432910] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.433126] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.433331] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2049.433534] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2051.086532] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2051.086826] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2053.087806] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2054.087245] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2054.087540] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2054.087650] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.083965] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.086520] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.165263] env[68282]: WARNING oslo_vmware.rw_handles [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2066.165263] 
env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2066.165263] env[68282]: ERROR oslo_vmware.rw_handles [ 2066.165874] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2066.168368] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2066.168634] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Copying Virtual Disk [datastore2] vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/7cd02c27-c069-4594-91a6-f05641de8dd6/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2066.168924] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a73f0acc-b740-4be3-9dda-4f23537538b9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.177034] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for the task: (returnval){ [ 2066.177034] env[68282]: value = "task-3470624" [ 2066.177034] env[68282]: _type = "Task" [ 2066.177034] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.185285] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': task-3470624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.687301] env[68282]: DEBUG oslo_vmware.exceptions [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2066.687626] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.688331] env[68282]: ERROR nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2066.688331] env[68282]: Faults: ['InvalidArgument'] [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Traceback (most recent call last): [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] yield resources [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] self.driver.spawn(context, instance, image_meta, [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] self._fetch_image_if_missing(context, vi) [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] image_cache(vi, tmp_image_ds_loc) [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] vm_util.copy_virtual_disk( [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] session._wait_for_task(vmdk_copy_task) [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] return self.wait_for_task(task_ref) [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] return evt.wait() [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] result = hub.switch() [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] return self.greenlet.switch() [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] self.f(*self.args, **self.kw) [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] raise exceptions.translate_fault(task_info.error) [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Faults: ['InvalidArgument'] [ 2066.688331] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] [ 2066.689392] env[68282]: INFO nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Terminating instance [ 2066.690462] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.690669] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2066.690906] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-afc353b6-c080-4d37-b79f-6740d4545726 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.693041] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2066.693248] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2066.693966] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c37cedf-6707-47b6-ae4d-90293477295d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.700524] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2066.700745] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-686c6989-c774-4b9e-9a91-4f8b27ee61d5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.702747] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2066.702922] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2066.703852] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-760709a5-5075-46e7-a5ff-5e9baee6213f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.708433] env[68282]: DEBUG oslo_vmware.api [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 2066.708433] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52dc684a-cdcb-0a98-7e75-0016bea35470" [ 2066.708433] env[68282]: _type = "Task" [ 2066.708433] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.721378] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2066.721599] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Creating directory with path [datastore2] vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2066.721803] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7846baf7-aa61-4520-8025-664f31f020de {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.739537] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Created directory with path [datastore2] vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2066.739732] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Fetch image to [datastore2] vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2066.739903] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2066.740634] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec352df-dfef-4791-afec-df543cd118c9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.748069] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd098f60-31e1-445f-9b84-836f2899a5f6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.756644] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c639d5bf-de91-48fa-abc2-3ce862af21da {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.787181] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41daf366-9c89-46a3-ab3c-e22855433707 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.789521] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2066.789716] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2066.789893] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Deleting the datastore file [datastore2] 5c2d229f-e14c-43b8-80d1-9232557b7520 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2066.790128] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46e8941c-8931-4faf-aa51-ade87fe1e98d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.794961] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-30b47717-dc5b-4b42-af81-003637ebad7c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.797659] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for the task: (returnval){ [ 2066.797659] env[68282]: value = "task-3470626" [ 2066.797659] env[68282]: _type = "Task" [ 2066.797659] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.804773] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': task-3470626, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.816996] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2066.868333] env[68282]: DEBUG oslo_vmware.rw_handles [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2066.929206] env[68282]: DEBUG oslo_vmware.rw_handles [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2066.929432] env[68282]: DEBUG oslo_vmware.rw_handles [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2067.308362] env[68282]: DEBUG oslo_vmware.api [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': task-3470626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066169} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.308671] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2067.308793] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2067.308970] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2067.309170] env[68282]: INFO nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 2067.311406] env[68282]: DEBUG nova.compute.claims [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2067.311624] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.311852] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.464582] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e22bde1-cf08-4921-bb6f-9124c1ed243a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.471688] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e9a219-a759-4b6d-acce-45868776e018 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.502858] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450d8ef8-4ab9-41cd-a822-aa2be57fedd0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.509907] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42c4c03-7180-478e-b69c-0b8538bdcee2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.523007] env[68282]: DEBUG nova.compute.provider_tree [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.534777] env[68282]: DEBUG nova.scheduler.client.report [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2067.549724] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 
tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.238s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.550311] env[68282]: ERROR nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.550311] env[68282]: Faults: ['InvalidArgument'] [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Traceback (most recent call last): [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] self.driver.spawn(context, instance, image_meta, [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] self._fetch_image_if_missing(context, vi) [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] image_cache(vi, tmp_image_ds_loc) [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] vm_util.copy_virtual_disk( [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] session._wait_for_task(vmdk_copy_task) [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] return self.wait_for_task(task_ref) [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] return evt.wait() [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 
5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] result = hub.switch() [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] return self.greenlet.switch() [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] self.f(*self.args, **self.kw) [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] raise exceptions.translate_fault(task_info.error) [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Faults: ['InvalidArgument'] [ 2067.550311] env[68282]: ERROR nova.compute.manager [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] [ 2067.551383] env[68282]: DEBUG nova.compute.utils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2067.552541] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Build of instance 5c2d229f-e14c-43b8-80d1-9232557b7520 was re-scheduled: A specified parameter was not correct: fileType [ 2067.552541] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2067.552908] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2067.553101] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2067.553284] env[68282]: DEBUG nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2067.553452] env[68282]: DEBUG nova.network.neutron [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2067.849288] env[68282]: DEBUG nova.network.neutron [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.859716] env[68282]: INFO nova.compute.manager [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Took 0.31 seconds to deallocate network for instance. [ 2067.954126] env[68282]: INFO nova.scheduler.client.report [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Deleted allocations for instance 5c2d229f-e14c-43b8-80d1-9232557b7520 [ 2067.974668] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a93b9caf-7a57-44f7-ae83-70b7819d2074 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 527.413s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.974940] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 330.946s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.975198] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "5c2d229f-e14c-43b8-80d1-9232557b7520-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.975429] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.975887] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.977680] env[68282]: INFO nova.compute.manager [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Terminating instance [ 2067.979623] env[68282]: DEBUG nova.compute.manager [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2067.979842] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2067.980095] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6102d86-9ced-462f-b9fe-1ee16b67daa7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.990446] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c74d04c-c2a6-4772-8681-b1724d85f76f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.017925] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5c2d229f-e14c-43b8-80d1-9232557b7520 could not be found. [ 2068.018140] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2068.018323] env[68282]: INFO nova.compute.manager [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2068.018572] env[68282]: DEBUG oslo.service.loopingcall [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2068.018783] env[68282]: DEBUG nova.compute.manager [-] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2068.018878] env[68282]: DEBUG nova.network.neutron [-] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2068.041480] env[68282]: DEBUG nova.network.neutron [-] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.048933] env[68282]: INFO nova.compute.manager [-] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] Took 0.03 seconds to deallocate network for instance. [ 2068.129271] env[68282]: DEBUG oslo_concurrency.lockutils [None req-fb0c8c4a-20c1-460b-9662-2509f555181a tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.154s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.130047] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 174.528s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.130250] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 5c2d229f-e14c-43b8-80d1-9232557b7520] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2068.130428] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "5c2d229f-e14c-43b8-80d1-9232557b7520" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2109.579371] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.087314] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2110.098978] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.099221] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.099393] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.099588] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2110.100715] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdb3e4e-1140-4a6a-b8e9-03ba0b60ca0b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.109266] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62861da2-d194-40dc-b970-9817660bd925 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.122938] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41324998-d719-4f17-8c2c-d352e0a9db6a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.128969] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fcff87-b36c-43ba-8c51-729b24d4b8b1 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.158493] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180898MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2110.158638] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.158817] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.225608] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2110.225817] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance aff4995e-4c8f-4ced-8743-e6cac0484875 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2110.225926] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 423ea779-232c-4662-acbd-9d2edec5867b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2110.226086] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2110.226231] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2110.226354] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2110.226473] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 14e97724-1044-4f32-ac27-8062120c2c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2110.226591] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 41e28779-65ad-476c-bc9c-9747beba2813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2110.226774] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2110.226912] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2110.320494] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96daf269-d58b-444b-9724-5166439b1d3b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.328112] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68450a4-5948-40b3-bf1e-afee25bd3b1c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.358766] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bef59b6-8fa6-4c62-b3b9-ac5bb59296c6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.365953] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a0ddeb-8d6b-4c61-9220-d6736388b605 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.378865] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2110.387936] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2110.400475] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2110.400653] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.242s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.400730] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2111.401113] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2111.401113] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2111.419758] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2111.419910] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2111.420044] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2111.420177] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2111.420304] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2111.420427] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2111.420602] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2111.420738] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2111.420861] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2111.421351] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2111.421494] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2113.088063] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.083421] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.086993] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.087884] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.088302] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.088361] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.632614] env[68282]: WARNING oslo_vmware.rw_handles [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Error occurred while reading the HTTP response.: 
http.client.RemoteDisconnected: Remote end closed connection without response [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2116.632614] env[68282]: ERROR oslo_vmware.rw_handles [ 2116.633197] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2116.635102] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2116.635349] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Copying Virtual Disk [datastore2] vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/5d1adeb1-d94d-4ba2-b75c-04b35e0361d2/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2116.635640] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82a55cf1-d861-4a26-bc96-961544ff3726 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.643132] env[68282]: DEBUG oslo_vmware.api [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 2116.643132] env[68282]: value = "task-3470627" [ 2116.643132] env[68282]: _type = "Task" [ 2116.643132] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.650996] env[68282]: DEBUG oslo_vmware.api [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': task-3470627, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.083131] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.152522] env[68282]: DEBUG oslo_vmware.exceptions [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2117.152866] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.153352] env[68282]: ERROR nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2117.153352] env[68282]: Faults: ['InvalidArgument'] [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Traceback (most recent call last): [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] yield resources [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] self.driver.spawn(context, instance, image_meta, [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] self._fetch_image_if_missing(context, vi) [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] image_cache(vi, tmp_image_ds_loc) [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] vm_util.copy_virtual_disk( [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] session._wait_for_task(vmdk_copy_task) [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] return self.wait_for_task(task_ref) [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] return evt.wait() [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] result = hub.switch() [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] return self.greenlet.switch() [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] self.f(*self.args, **self.kw) [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] raise exceptions.translate_fault(task_info.error) [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Faults: ['InvalidArgument'] [ 2117.153352] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] [ 2117.154378] env[68282]: INFO nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Terminating instance [ 2117.155187] env[68282]: DEBUG 
oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.155391] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.155630] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97e30341-8a80-41b7-bb3c-acd814600435 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.158962] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2117.159168] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2117.159886] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908b01c5-6571-4703-b881-3313b3cfe054 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.166510] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2117.166770] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1658d2ab-4514-47cd-a41c-b6c07a61f427 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.168827] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.169012] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2117.169964] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-270dc644-b57c-4358-a2b9-e8e2c09794ec {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.175170] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Waiting for the task: (returnval){ [ 2117.175170] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]521046e0-bcda-a7e7-3671-a109523ef1a7" [ 2117.175170] env[68282]: _type = "Task" [ 2117.175170] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.182088] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]521046e0-bcda-a7e7-3671-a109523ef1a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.245802] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2117.245984] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2117.246100] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Deleting the datastore file [datastore2] b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2117.246354] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3bc1d350-7747-47da-9fc8-0e1754fb6ead {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.252761] env[68282]: DEBUG oslo_vmware.api [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 2117.252761] env[68282]: value = "task-3470629" [ 2117.252761] env[68282]: _type = "Task" [ 2117.252761] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.260869] env[68282]: DEBUG oslo_vmware.api [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': task-3470629, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.685722] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2117.686009] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Creating directory with path [datastore2] vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2117.686255] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a1e6be8-4fc2-4f36-9486-0c194c1e455b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.697196] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Created directory with path [datastore2] vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2117.697434] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Fetch image to [datastore2] vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2117.697593] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2117.698325] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c251629-c813-4f56-8305-31a8c2d4fe48 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.704814] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afc914f-4331-44c7-a579-3d7fdfaffb2d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.713588] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d51987c-155a-44b4-a7a0-494863dea91e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.525128] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c6274c32-a33a-45ee-83c5-5aaa0df05e38 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.528107] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "2c38a690-608e-4531-aeb3-629eb5c09532" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.528335] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "2c38a690-608e-4531-aeb3-629eb5c09532" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.535524] env[68282]: DEBUG oslo_vmware.api [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': task-3470629, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066945} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.537417] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2118.537417] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2118.537598] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2118.537757] env[68282]: INFO nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Took 1.38 seconds to destroy the instance on the hypervisor. 
[ 2118.540217] env[68282]: DEBUG nova.compute.claims [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2118.540299] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.540476] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.544067] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-953c4857-750c-49f4-9657-7ee8cb06c232 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.546020] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2118.571839] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2118.608381] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.709120] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffda716-e721-4dfe-8334-dfc8ffc47b24 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.716284] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d873af-ff89-4de5-b4dd-e74154239cec {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.720134] env[68282]: DEBUG oslo_vmware.rw_handles [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2118.805353] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb21334-02f6-4c46-9d48-a5280e078326 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.809637] env[68282]: DEBUG oslo_vmware.rw_handles [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2118.809637] env[68282]: DEBUG oslo_vmware.rw_handles [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2118.816071] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b91a41-0ccf-4520-9203-663e59ad8e9f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.832836] env[68282]: DEBUG nova.compute.provider_tree [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2118.841752] env[68282]: DEBUG nova.scheduler.client.report [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2118.859147] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.319s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.859720] env[68282]: ERROR nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Failed to build and run instance: 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.859720] env[68282]: Faults: ['InvalidArgument'] [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Traceback (most recent call last): [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] self.driver.spawn(context, instance, image_meta, [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] self._fetch_image_if_missing(context, vi) [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] image_cache(vi, tmp_image_ds_loc) [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] vm_util.copy_virtual_disk( [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] session._wait_for_task(vmdk_copy_task) [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] return self.wait_for_task(task_ref) [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] return evt.wait() [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] result = hub.switch() [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] 
return self.greenlet.switch() [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] self.f(*self.args, **self.kw) [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] raise exceptions.translate_fault(task_info.error) [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Faults: ['InvalidArgument'] [ 2118.859720] env[68282]: ERROR nova.compute.manager [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] [ 2118.861468] env[68282]: DEBUG nova.compute.utils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2118.861525] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.253s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.863277] env[68282]: INFO nova.compute.claims [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2118.866312] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Build of instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 was re-scheduled: A specified parameter was not correct: fileType [ 2118.866312] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2118.866763] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2118.866943] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2118.867134] env[68282]: DEBUG nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2118.867315] env[68282]: DEBUG nova.network.neutron [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2119.016756] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de614eb-aa3b-477e-b624-320321f8af30 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.024324] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a201504-d551-4e58-a50e-8094b7e6cf73 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.054883] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f07efa2-f336-4c80-be66-640de303daaf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.062329] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772aad4d-24e6-4d75-a6ed-b79ae543cfba {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.076346] env[68282]: DEBUG nova.compute.provider_tree [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2119.084263] env[68282]: DEBUG nova.scheduler.client.report [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2119.097701] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.236s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.098199] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e 
tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2119.137340] env[68282]: DEBUG nova.compute.utils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2119.138746] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2119.138947] env[68282]: DEBUG nova.network.neutron [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2119.151926] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2119.210437] env[68282]: DEBUG nova.policy [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7aee8a1e035742e0b67873bfcce2ef72', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3d2a3fac4b04f8fa6622043de5e500d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 2119.229984] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2119.268669] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2119.268847] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2119.269024] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2119.269191] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2119.269343] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2119.269494] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2119.269803] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2119.269968] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2119.270155] 
env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2119.270306] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2119.270485] env[68282]: DEBUG nova.virt.hardware [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2119.271610] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74aa716b-4a26-48ed-a1eb-dc21e41d72fa {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.274933] env[68282]: DEBUG nova.network.neutron [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.281657] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87f3073-13ab-4a43-85a7-13c206d02389 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.286086] env[68282]: INFO nova.compute.manager [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Took 0.42 seconds to deallocate network for instance. 
[ 2119.383087] env[68282]: INFO nova.scheduler.client.report [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Deleted allocations for instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 [ 2119.404898] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4f1c5ed-597e-4fdb-aa0b-c9faf4b71fe6 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 565.524s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.405182] env[68282]: DEBUG oslo_concurrency.lockutils [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 369.148s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.405404] env[68282]: DEBUG oslo_concurrency.lockutils [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.405605] env[68282]: DEBUG oslo_concurrency.lockutils [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.405771] env[68282]: DEBUG oslo_concurrency.lockutils [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.409128] env[68282]: INFO nova.compute.manager [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Terminating instance [ 2119.409903] env[68282]: DEBUG nova.compute.manager [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2119.409989] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2119.410632] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3917aef-395e-4f32-926e-26ebc771b66d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.419527] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66b7c35-9f8f-4b7a-9f9c-412c98087a89 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.446450] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28 could not be found. [ 2119.446625] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2119.446791] env[68282]: INFO nova.compute.manager [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2119.447191] env[68282]: DEBUG oslo.service.loopingcall [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2119.447296] env[68282]: DEBUG nova.compute.manager [-] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2119.447373] env[68282]: DEBUG nova.network.neutron [-] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2119.471482] env[68282]: DEBUG nova.network.neutron [-] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.481050] env[68282]: INFO nova.compute.manager [-] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] Took 0.03 seconds to deallocate network for instance. 
[ 2119.572444] env[68282]: DEBUG nova.network.neutron [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Successfully created port: a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2119.598922] env[68282]: DEBUG oslo_concurrency.lockutils [None req-57e31ba1-0298-44dd-aec1-6de0805a524a tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.600072] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 225.998s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.600280] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28] During sync_power_state the instance has a pending task (deleting). Skip. [ 2119.600458] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "b52ce0fe-17ef-4fd8-95b8-f9d65fe32c28" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2120.270763] env[68282]: DEBUG nova.network.neutron [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Successfully updated port: a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2120.279825] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "refresh_cache-2c38a690-608e-4531-aeb3-629eb5c09532" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.279968] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "refresh_cache-2c38a690-608e-4531-aeb3-629eb5c09532" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.280138] env[68282]: DEBUG nova.network.neutron [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2120.326516] env[68282]: DEBUG nova.network.neutron [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e 
tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2120.347792] env[68282]: DEBUG nova.compute.manager [req-c9c62f47-6e9e-4439-a7df-d5e87eb37783 req-57168232-3e34-476c-8902-236b7f4a0752 service nova] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Received event network-vif-plugged-a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 2120.348032] env[68282]: DEBUG oslo_concurrency.lockutils [req-c9c62f47-6e9e-4439-a7df-d5e87eb37783 req-57168232-3e34-476c-8902-236b7f4a0752 service nova] Acquiring lock "2c38a690-608e-4531-aeb3-629eb5c09532-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.348246] env[68282]: DEBUG oslo_concurrency.lockutils [req-c9c62f47-6e9e-4439-a7df-d5e87eb37783 req-57168232-3e34-476c-8902-236b7f4a0752 service nova] Lock "2c38a690-608e-4531-aeb3-629eb5c09532-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2120.348418] env[68282]: DEBUG oslo_concurrency.lockutils [req-c9c62f47-6e9e-4439-a7df-d5e87eb37783 req-57168232-3e34-476c-8902-236b7f4a0752 service nova] Lock "2c38a690-608e-4531-aeb3-629eb5c09532-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2120.348588] env[68282]: DEBUG nova.compute.manager [req-c9c62f47-6e9e-4439-a7df-d5e87eb37783 req-57168232-3e34-476c-8902-236b7f4a0752 service nova] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] No waiting events found dispatching network-vif-plugged-a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2120.348755] env[68282]: WARNING nova.compute.manager [req-c9c62f47-6e9e-4439-a7df-d5e87eb37783 req-57168232-3e34-476c-8902-236b7f4a0752 service nova] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Received unexpected event network-vif-plugged-a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c for instance with vm_state building and task_state spawning. 
[ 2120.500845] env[68282]: DEBUG nova.network.neutron [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Updating instance_info_cache with network_info: [{"id": "a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c", "address": "fa:16:3e:aa:a5:58", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b34aa1-2b", "ovs_interfaceid": "a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.514405] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "refresh_cache-2c38a690-608e-4531-aeb3-629eb5c09532" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.514661] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Instance network_info: |[{"id": "a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c", "address": "fa:16:3e:aa:a5:58", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b34aa1-2b", "ovs_interfaceid": "a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2120.515094] env[68282]: DEBUG 
nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:a5:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b911797-478d-4ee5-bce9-6f2f49014e94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2120.522789] env[68282]: DEBUG oslo.service.loopingcall [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2120.524081] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2120.524081] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-300ea757-073d-4994-acb7-0a0240bc584e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.543515] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2120.543515] env[68282]: value = "task-3470630" [ 2120.543515] env[68282]: _type = "Task" [ 2120.543515] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.550890] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470630, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.053506] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470630, 'name': CreateVM_Task, 'duration_secs': 0.271648} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.053866] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2121.054315] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.054485] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.054803] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2121.055054] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93404f4b-947d-4149-bb75-ba565deea745 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.059141] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 2121.059141] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523b2c6b-9e87-2321-d674-72f8862119f7" [ 2121.059141] env[68282]: _type = "Task" [ 2121.059141] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.066249] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523b2c6b-9e87-2321-d674-72f8862119f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.570046] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.570309] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2121.570530] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2122.376781] env[68282]: DEBUG nova.compute.manager [req-d6a1d088-2008-43e8-9029-e6e265ba918e req-a6d4715a-7f80-41b3-87e5-5e16b02a57e7 service nova] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Received event network-changed-a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 2122.377010] env[68282]: DEBUG nova.compute.manager [req-d6a1d088-2008-43e8-9029-e6e265ba918e req-a6d4715a-7f80-41b3-87e5-5e16b02a57e7 service nova] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Refreshing instance network info cache due to event network-changed-a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 2122.377225] env[68282]: DEBUG oslo_concurrency.lockutils [req-d6a1d088-2008-43e8-9029-e6e265ba918e req-a6d4715a-7f80-41b3-87e5-5e16b02a57e7 service nova] Acquiring lock "refresh_cache-2c38a690-608e-4531-aeb3-629eb5c09532" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2122.377374] env[68282]: DEBUG oslo_concurrency.lockutils [req-d6a1d088-2008-43e8-9029-e6e265ba918e req-a6d4715a-7f80-41b3-87e5-5e16b02a57e7 service nova] Acquired lock "refresh_cache-2c38a690-608e-4531-aeb3-629eb5c09532" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2122.377700] env[68282]: DEBUG nova.network.neutron [req-d6a1d088-2008-43e8-9029-e6e265ba918e req-a6d4715a-7f80-41b3-87e5-5e16b02a57e7 service nova] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Refreshing network info cache for port a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2122.624418] env[68282]: DEBUG nova.network.neutron [req-d6a1d088-2008-43e8-9029-e6e265ba918e req-a6d4715a-7f80-41b3-87e5-5e16b02a57e7 service nova] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Updated VIF entry in instance network info cache for port a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2122.624833] env[68282]: DEBUG nova.network.neutron [req-d6a1d088-2008-43e8-9029-e6e265ba918e req-a6d4715a-7f80-41b3-87e5-5e16b02a57e7 service nova] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Updating instance_info_cache with network_info: [{"id": "a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c", "address": "fa:16:3e:aa:a5:58", "network": {"id": "302e4f1f-7d6c-497f-92b3-5d720eb6be3f", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1346561679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3d2a3fac4b04f8fa6622043de5e500d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3b34aa1-2b", "ovs_interfaceid": "a3b34aa1-2b1b-4a7b-ba4b-7c34f7eb5f1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.634207] env[68282]: DEBUG oslo_concurrency.lockutils [req-d6a1d088-2008-43e8-9029-e6e265ba918e req-a6d4715a-7f80-41b3-87e5-5e16b02a57e7 service nova] Releasing lock "refresh_cache-2c38a690-608e-4531-aeb3-629eb5c09532" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2130.725698] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "14e97724-1044-4f32-ac27-8062120c2c46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.909820] env[68282]: DEBUG oslo_concurrency.lockutils [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "41e28779-65ad-476c-bc9c-9747beba2813" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.651847] env[68282]: WARNING oslo_vmware.rw_handles [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2166.651847] env[68282]: ERROR oslo_vmware.rw_handles [ 2166.652699] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2166.654078] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2166.654337] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Copying Virtual Disk [datastore2] vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/4608dd7b-492c-4433-83d5-2794fd6344ec/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2166.654616] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f9b49b1-f9f9-4b6b-917b-6b14e43b72eb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.662061] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Waiting for the task: (returnval){ [ 2166.662061] env[68282]: value = "task-3470631" [ 2166.662061] env[68282]: _type = "Task" [ 2166.662061] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.669971] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Task: {'id': task-3470631, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.172150] env[68282]: DEBUG oslo_vmware.exceptions [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2167.172390] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2167.172949] env[68282]: ERROR nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2167.172949] env[68282]: Faults: ['InvalidArgument'] [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Traceback (most recent call last): [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] yield resources [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] self.driver.spawn(context, instance, image_meta, [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] self._fetch_image_if_missing(context, vi) [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] image_cache(vi, tmp_image_ds_loc) [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] vm_util.copy_virtual_disk( [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] session._wait_for_task(vmdk_copy_task) [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] return self.wait_for_task(task_ref) [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] return evt.wait() [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] result = hub.switch() [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] return self.greenlet.switch() [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] self.f(*self.args, **self.kw) [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] raise exceptions.translate_fault(task_info.error) [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Faults: ['InvalidArgument'] [ 2167.172949] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] [ 2167.174086] env[68282]: INFO nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Terminating instance [ 2167.174815] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.175038] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 
tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2167.175280] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91a4898d-e8ab-429e-956a-80ac05dff798 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.177413] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2167.177617] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2167.178319] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aad704a-04c1-476a-b980-c66a5bcda08a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.185108] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2167.185345] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f1a91b0-1ffd-4587-bbbc-c5b5730cd8c5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.187483] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2167.187662] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2167.188640] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eefcb0cc-8727-4018-bfeb-481482cb0736 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.193089] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 2167.193089] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5205d97f-9c45-5120-e11f-cc4305c78803" [ 2167.193089] env[68282]: _type = "Task" [ 2167.193089] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.200184] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5205d97f-9c45-5120-e11f-cc4305c78803, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.255214] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2167.255424] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2167.255622] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Deleting the datastore file [datastore2] aff4995e-4c8f-4ced-8743-e6cac0484875 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2167.255918] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7e4c9c8-05c8-4c59-aa51-43b3e960f26d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.262207] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Waiting for the task: (returnval){ [ 2167.262207] env[68282]: value = "task-3470633" [ 2167.262207] env[68282]: _type = "Task" [ 2167.262207] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.271763] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Task: {'id': task-3470633, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.703360] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2167.703712] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating directory with path [datastore2] vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2167.703836] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0e2c0c4-3442-46d2-ac29-3576d39c2619 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.714396] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created directory with path [datastore2] vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2167.714582] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Fetch image to [datastore2] vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2167.714743] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2167.715436] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0aa4d8-ba65-454c-9c1d-464bc1d1e3be {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.721557] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcc05c1-dff1-4f71-8cc3-4dc17b0d6578 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.730125] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0a13a0-c52b-4648-9e3e-4b85d05f11ee {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.759993] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0ddae5f6-8234-4d45-8481-f0a04f8fb7f5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.766699] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-70f3e481-c78d-4093-a5bd-360043938be5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.770793] env[68282]: DEBUG oslo_vmware.api [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Task: {'id': task-3470633, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065948} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.771303] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2167.771486] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2167.771702] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2167.771892] env[68282]: INFO nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 2167.773925] env[68282]: DEBUG nova.compute.claims [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2167.774115] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.774329] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.789150] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2167.842479] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2167.904739] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2167.904942] env[68282]: DEBUG oslo_vmware.rw_handles [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2167.969098] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5980f0ab-1954-43a2-90ec-a477f077ad84 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.976687] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9ce811-ce2b-4836-8586-f29bbb8ca9bb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.007137] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bdb3b1-9651-4601-88a8-d11116ac49cb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.013976] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beacac8d-7da4-4ab9-b8dc-fb4e9f38ccfd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.026674] env[68282]: DEBUG nova.compute.provider_tree [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.035601] env[68282]: DEBUG nova.scheduler.client.report [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2168.049539] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.275s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.050077] env[68282]: ERROR nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.050077] env[68282]: Faults: ['InvalidArgument'] [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Traceback (most recent call last): [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2168.050077] 
env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] self.driver.spawn(context, instance, image_meta, [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] self._fetch_image_if_missing(context, vi) [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] image_cache(vi, tmp_image_ds_loc) [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] vm_util.copy_virtual_disk( [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] session._wait_for_task(vmdk_copy_task) [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] return self.wait_for_task(task_ref) [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] return evt.wait() [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] result = hub.switch() [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] return self.greenlet.switch() [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] self.f(*self.args, **self.kw) [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] raise exceptions.translate_fault(task_info.error) [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Faults: ['InvalidArgument'] [ 2168.050077] env[68282]: ERROR nova.compute.manager [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] [ 2168.050954] env[68282]: DEBUG nova.compute.utils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2168.052310] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Build of instance aff4995e-4c8f-4ced-8743-e6cac0484875 was re-scheduled: A specified parameter was not correct: fileType [ 2168.052310] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2168.052681] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2168.052865] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2168.053122] env[68282]: DEBUG nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2168.053306] env[68282]: DEBUG nova.network.neutron [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2168.396958] env[68282]: DEBUG nova.network.neutron [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2168.414139] env[68282]: INFO nova.compute.manager [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Took 0.36 seconds to deallocate network for instance. [ 2168.540808] env[68282]: INFO nova.scheduler.client.report [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Deleted allocations for instance aff4995e-4c8f-4ced-8743-e6cac0484875 [ 2168.568145] env[68282]: DEBUG oslo_concurrency.lockutils [None req-316f3efc-308d-4b1b-975f-2c2261ec7348 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 570.805s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.568403] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 374.962s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.568690] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Acquiring lock "aff4995e-4c8f-4ced-8743-e6cac0484875-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.568997] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.569325] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.571857] env[68282]: INFO nova.compute.manager [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Terminating instance [ 2168.574141] env[68282]: DEBUG nova.compute.manager [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2168.574409] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2168.574810] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e58cdd62-e34b-4aab-9148-e2fe8e51a2d8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.583705] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392ce030-df76-4b9b-a2d2-0ad8fce5d10e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.610198] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aff4995e-4c8f-4ced-8743-e6cac0484875 could not be found. [ 2168.610410] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2168.610626] env[68282]: INFO nova.compute.manager [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2168.610882] env[68282]: DEBUG oslo.service.loopingcall [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2168.611392] env[68282]: DEBUG nova.compute.manager [-] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2168.611483] env[68282]: DEBUG nova.network.neutron [-] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2168.645362] env[68282]: DEBUG nova.network.neutron [-] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2168.653052] env[68282]: INFO nova.compute.manager [-] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] Took 0.04 seconds to deallocate network for instance. [ 2168.735962] env[68282]: DEBUG oslo_concurrency.lockutils [None req-e1f35613-6d3a-4e1e-bd77-2d6c9e3101f9 tempest-AttachVolumeNegativeTest-884077893 tempest-AttachVolumeNegativeTest-884077893-project-member] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.736808] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 275.135s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.737143] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: aff4995e-4c8f-4ced-8743-e6cac0484875] During sync_power_state the instance has a pending task (deleting). Skip. 
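The update_available_resource pass that follows lists seven instances still managed on this host, each holding a placement allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, against 512 MB of reserved host memory, and then reports a final resource view of used_ram=1408MB, used_disk=7GB, used_vcpus=7. A quick sanity check of that roll-up (plain arithmetic on the figures in those entries, not Nova code):

    # Figures taken from the resource-tracker entries below.
    instances = 7
    per_instance = {"MEMORY_MB": 128, "DISK_GB": 1, "VCPU": 1}
    reserved_ram_mb = 512                      # 'reserved' in the MEMORY_MB inventory

    used_ram_mb = reserved_ram_mb + instances * per_instance["MEMORY_MB"]
    used_disk_gb = instances * per_instance["DISK_GB"]
    used_vcpus = instances * per_instance["VCPU"]

    assert (used_ram_mb, used_disk_gb, used_vcpus) == (1408, 7, 7)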
[ 2168.737226] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "aff4995e-4c8f-4ced-8743-e6cac0484875" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.087486] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2170.096181] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2170.108718] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2170.108939] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.109122] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.109282] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2170.110804] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a174b1-724f-4fc5-831a-f7d0cc18561f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.119052] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6322b8c0-6cd0-4239-9d2d-b5778672c9ea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.132668] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644d2f5b-fc31-4106-af30-78dd5bb1d445 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.138556] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7fa504-21f3-4b82-8225-2cc72bd29ed1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.167088] env[68282]: DEBUG nova.compute.resource_tracker [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180934MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2170.167240] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2170.167447] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.232184] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 423ea779-232c-4662-acbd-9d2edec5867b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.232345] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.232475] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.232598] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.232720] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 14e97724-1044-4f32-ac27-8062120c2c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.232837] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 41e28779-65ad-476c-bc9c-9747beba2813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.233047] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2c38a690-608e-4531-aeb3-629eb5c09532 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2170.233244] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2170.233383] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2170.322433] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223e0cdd-3907-4dd6-87b4-1b92248cd4a4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.331109] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49439e5-b4fc-4e80-9e77-eb8d7ac16023 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.362551] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49333077-89a7-434b-8510-daf744001643 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.370193] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048c1cff-71a2-42a7-9b54-14fe56d4270e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.384151] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2170.392590] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2170.405273] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2170.405440] 
env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.238s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.087600] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2172.087928] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2172.088081] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2172.088250] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11263}} [ 2172.097477] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] There are 0 instances to clean {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11272}} [ 2173.097337] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2173.097642] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2173.097642] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2173.114070] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2173.114289] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2173.114392] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network cache update for instance because it is Building. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2173.114504] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2173.114833] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2173.115013] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2173.115156] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2173.115286] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2174.086700] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2175.088074] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2176.088294] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.082791] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.086373] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.086572] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2190.087643] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task 
ComputeManager._cleanup_incomplete_migrations {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2190.087965] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances with incomplete migration {{(pid=68282) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11301}} [ 2213.705376] env[68282]: WARNING oslo_vmware.rw_handles [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2213.705376] env[68282]: ERROR oslo_vmware.rw_handles [ 2213.705917] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2213.707603] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2213.707844] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Copying Virtual Disk [datastore2] vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/43242257-587f-48f7-87b9-4d2a73a8fb42/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2213.708145] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-60140608-bdd3-4368-a8cd-addaf0222129 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.716200] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 2213.716200] env[68282]: value = "task-3470634" [ 2213.716200] env[68282]: _type = "Task" [ 2213.716200] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2213.723632] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.226341] env[68282]: DEBUG oslo_vmware.exceptions [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Fault InvalidArgument not matched. {{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2214.226598] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2214.227161] env[68282]: ERROR nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2214.227161] env[68282]: Faults: ['InvalidArgument'] [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Traceback (most recent call last): [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] yield resources [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] self.driver.spawn(context, instance, image_meta, [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2214.227161] 
env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] self._fetch_image_if_missing(context, vi) [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] image_cache(vi, tmp_image_ds_loc) [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] vm_util.copy_virtual_disk( [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] session._wait_for_task(vmdk_copy_task) [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] return self.wait_for_task(task_ref) [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] return evt.wait() [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] result = hub.switch() [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] return self.greenlet.switch() [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] self.f(*self.args, **self.kw) [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] raise exceptions.translate_fault(task_info.error) [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Faults: ['InvalidArgument'] [ 2214.227161] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] [ 
2214.227877] env[68282]: INFO nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Terminating instance [ 2214.229119] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2214.229348] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2214.229602] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7614ba06-d51b-4529-81b2-3525d90ef493 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.231775] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2214.231967] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2214.232681] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10839331-4598-46cf-a5ed-5e5f07dab0e5 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.239624] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2214.240580] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10089ba6-9241-4d83-9c12-42a0775c4435 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.241931] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2214.242120] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2214.242771] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ce22a34-d3ac-4b10-9d13-715632af2b01 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.247481] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Waiting for the task: (returnval){ [ 2214.247481] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52465592-41b0-f530-9664-6f3969c3026a" [ 2214.247481] env[68282]: _type = "Task" [ 2214.247481] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2214.255608] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52465592-41b0-f530-9664-6f3969c3026a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.309503] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2214.309719] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2214.309908] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleting the datastore file [datastore2] 423ea779-232c-4662-acbd-9d2edec5867b {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2214.310197] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-711ee36f-18f4-47d5-ae0e-0432eb30a0dc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.315844] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 2214.315844] env[68282]: value = "task-3470636" [ 2214.315844] env[68282]: _type = "Task" [ 2214.315844] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2214.323428] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470636, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2214.758068] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2214.758068] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Creating directory with path [datastore2] vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2214.758068] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1999b802-26ae-4809-b5ae-e44d2c61fcf0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.768940] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Created directory with path [datastore2] vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2214.769164] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Fetch image to [datastore2] vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2214.769342] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2214.770134] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c412d9-ef1a-4291-945a-90d7da6a7a3e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.776744] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8f13a3-d34f-44e5-b4d9-177f4657e82f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.785730] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57e3544-1492-4655-9a90-6c02fef505da {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.819623] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae1a2ca-894a-4a72-9fed-ff287190f33c {{(pid=68282) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.826718] env[68282]: DEBUG oslo_vmware.api [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076097} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2214.828227] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2214.828427] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2214.828600] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2214.828778] env[68282]: INFO nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2214.830565] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4ce38cb4-da6c-4164-ace4-2ff4a0a67ceb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.832524] env[68282]: DEBUG nova.compute.claims [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2214.832740] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.832963] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.853386] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2214.981602] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8fd1c6-ad7c-4bb8-be94-ce4aff3a6e36 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.989029] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65e4584-fc96-4fd0-b865-64810497f848 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.020164] env[68282]: DEBUG oslo_vmware.rw_handles [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2215.022016] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ff0b56-4fcb-476c-89b8-a809a11a8018 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.081675] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b728fa5a-8ea0-47a6-8078-f52454308d20 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.088401] env[68282]: DEBUG oslo_vmware.rw_handles [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2215.088665] env[68282]: DEBUG oslo_vmware.rw_handles [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2215.099312] env[68282]: DEBUG nova.compute.provider_tree [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2215.106522] env[68282]: DEBUG nova.scheduler.client.report [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2215.124041] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.289s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.124041] env[68282]: ERROR nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2215.124041] env[68282]: Faults: ['InvalidArgument'] [ 
2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Traceback (most recent call last): [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] self.driver.spawn(context, instance, image_meta, [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] self._fetch_image_if_missing(context, vi) [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] image_cache(vi, tmp_image_ds_loc) [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] vm_util.copy_virtual_disk( [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] session._wait_for_task(vmdk_copy_task) [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] return self.wait_for_task(task_ref) [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] return evt.wait() [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] result = hub.switch() [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] return self.greenlet.switch() [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] self.f(*self.args, **self.kw) [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] raise exceptions.translate_fault(task_info.error) [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Faults: ['InvalidArgument'] [ 2215.124041] env[68282]: ERROR nova.compute.manager [instance: 423ea779-232c-4662-acbd-9d2edec5867b] [ 2215.124889] env[68282]: DEBUG nova.compute.utils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2215.125947] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Build of instance 423ea779-232c-4662-acbd-9d2edec5867b was re-scheduled: A specified parameter was not correct: fileType [ 2215.125947] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2215.126440] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2215.126611] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2215.126785] env[68282]: DEBUG nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2215.126950] env[68282]: DEBUG nova.network.neutron [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2215.426268] env[68282]: DEBUG nova.network.neutron [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2215.441267] env[68282]: INFO nova.compute.manager [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Took 0.31 seconds to deallocate network for instance. [ 2215.533041] env[68282]: INFO nova.scheduler.client.report [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleted allocations for instance 423ea779-232c-4662-acbd-9d2edec5867b [ 2215.552273] env[68282]: DEBUG oslo_concurrency.lockutils [None req-a0586e79-7b14-4d6c-a04f-758135ed0390 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "423ea779-232c-4662-acbd-9d2edec5867b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 489.367s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.552439] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "423ea779-232c-4662-acbd-9d2edec5867b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 321.950s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.552672] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2215.552953] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "423ea779-232c-4662-acbd-9d2edec5867b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.553532] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "423ea779-232c-4662-acbd-9d2edec5867b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 294.025s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.553813] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "423ea779-232c-4662-acbd-9d2edec5867b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2215.554086] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "423ea779-232c-4662-acbd-9d2edec5867b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.554242] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "423ea779-232c-4662-acbd-9d2edec5867b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.556682] env[68282]: INFO nova.compute.manager [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Terminating instance [ 2215.558503] env[68282]: DEBUG nova.compute.manager [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2215.558697] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2215.558955] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef459d69-77dd-416f-ad95-153a3d1d598e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.569022] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f297233d-00c3-440c-abf7-65d4b5d253d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.595613] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 423ea779-232c-4662-acbd-9d2edec5867b could not be found. [ 2215.595831] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2215.596022] env[68282]: INFO nova.compute.manager [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2215.596275] env[68282]: DEBUG oslo.service.loopingcall [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2215.596775] env[68282]: DEBUG nova.compute.manager [-] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2215.596883] env[68282]: DEBUG nova.network.neutron [-] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2215.620921] env[68282]: DEBUG nova.network.neutron [-] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2215.628999] env[68282]: INFO nova.compute.manager [-] [instance: 423ea779-232c-4662-acbd-9d2edec5867b] Took 0.03 seconds to deallocate network for instance. 
[ 2215.713126] env[68282]: DEBUG oslo_concurrency.lockutils [None req-83b3b36c-e906-46cb-8884-198a513862b9 tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "423ea779-232c-4662-acbd-9d2edec5867b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.097808] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2231.110298] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2231.110521] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2231.110696] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.110886] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2231.112524] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ac76ed-e38d-4419-a3d5-e089621669de {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.121053] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae320f88-e571-4579-a44e-01ca9f9ea035 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.135835] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cb7d57-41bd-46de-9c94-86b38dda329a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.141898] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1433f3-c953-468c-a06f-95c7e1de1a21 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.170475] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180944MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2231.170627] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2231.170809] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2231.301158] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.301376] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.301560] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.301733] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 14e97724-1044-4f32-ac27-8062120c2c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.301915] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 41e28779-65ad-476c-bc9c-9747beba2813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.302151] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2c38a690-608e-4531-aeb3-629eb5c09532 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2231.302498] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2231.302728] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2231.319253] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing inventories for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2231.332226] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Updating ProviderTree inventory for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2231.332415] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Updating inventory in ProviderTree for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2231.343126] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing aggregate associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, aggregates: None {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2231.361872] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing trait associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2231.440042] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7518a304-7f5b-4843-b2e0-839e94634ede {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.447487] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-547dd279-084c-4fc1-8d19-e542efb93053 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.478844] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15641a0e-19a6-475a-abd8-9bd8d015c1a0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.485539] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb62ffc5-196f-4af7-a9a0-ea6ca3eb846f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.498202] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2231.506738] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2231.519767] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2231.520016] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.349s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2232.509459] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2232.509784] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2234.087427] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2234.087788] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2234.087788] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2234.103734] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2234.103904] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2234.104898] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2234.104898] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2234.104898] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2234.104898] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2234.104898] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2236.087566] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2236.087566] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2237.088865] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2238.087841] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2238.087841] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.083453] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2240.102026] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2263.722062] env[68282]: WARNING oslo_vmware.rw_handles [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2263.722062] env[68282]: ERROR 
oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2263.722062] env[68282]: ERROR oslo_vmware.rw_handles [ 2263.722707] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2263.724483] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2263.724753] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Copying Virtual Disk [datastore2] vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/140258c2-9010-4219-a860-becaf28a51e5/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2263.725064] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b53abf01-6c72-438d-91f4-65be963be402 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.736689] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Waiting for the task: (returnval){ [ 2263.736689] env[68282]: value = "task-3470637" [ 2263.736689] env[68282]: _type = "Task" [ 2263.736689] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.744343] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Task: {'id': task-3470637, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.247957] env[68282]: DEBUG oslo_vmware.exceptions [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2264.247957] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2264.248160] env[68282]: ERROR nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2264.248160] env[68282]: Faults: ['InvalidArgument'] [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Traceback (most recent call last): [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] yield resources [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] self.driver.spawn(context, instance, image_meta, [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] self._fetch_image_if_missing(context, vi) [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] image_cache(vi, tmp_image_ds_loc) [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] vm_util.copy_virtual_disk( [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] session._wait_for_task(vmdk_copy_task) [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] return self.wait_for_task(task_ref) [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] return evt.wait() [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] result = hub.switch() [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] return self.greenlet.switch() [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] self.f(*self.args, **self.kw) [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] raise exceptions.translate_fault(task_info.error) [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Faults: ['InvalidArgument'] [ 2264.248160] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] [ 2264.248861] env[68282]: INFO nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Terminating instance [ 2264.250032] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2264.250238] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2264.250494] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ee30ae4-e4c9-452a-976e-69ea994e363e {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.252919] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2264.252919] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2264.253508] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0612aa-f9b2-4f73-94c6-8cae71de4e19 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.260326] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2264.260534] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc9fbba6-a661-4aba-88c0-94eb3120465a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.262588] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2264.262756] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2264.263783] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bce579f-3296-4d7b-bfe4-5a7c2b3c1aa1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.268249] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Waiting for the task: (returnval){ [ 2264.268249] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523eff51-0322-d9ea-926f-51835c5796ab" [ 2264.268249] env[68282]: _type = "Task" [ 2264.268249] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.276161] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523eff51-0322-d9ea-926f-51835c5796ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.327094] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2264.327331] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2264.327516] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Deleting the datastore file [datastore2] e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2264.327785] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-607bc731-67cc-469b-96d5-25dbc5c2c736 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.333980] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Waiting for the task: (returnval){ [ 2264.333980] env[68282]: value = "task-3470639" [ 2264.333980] env[68282]: _type = "Task" [ 2264.333980] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.341634] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Task: {'id': task-3470639, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.778108] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2264.778402] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Creating directory with path [datastore2] vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2264.778590] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16c2c3e4-5fbb-4ea7-b3c3-8e2358b1967a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.789699] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Created directory with path [datastore2] vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2264.789884] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Fetch image to [datastore2] vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2264.790069] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2264.790799] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320d4a13-8681-419b-8d36-c8039518425f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.797030] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be27690f-4066-4ae7-ba31-07476b94a5cc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.805689] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d9a32f-f252-4e56-9377-246434624ec3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.837985] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af4a3d4-6c08-4454-8c17-814e55ba8124 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.844453] env[68282]: DEBUG oslo_vmware.api [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Task: {'id': task-3470639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071492} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2264.845833] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2264.846033] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2264.846213] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2264.846387] env[68282]: INFO nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Took 0.59 seconds to destroy the instance on the hypervisor. 
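The entries above show the destroy path driving vCenter tasks (UnregisterVM, DeleteDatastoreFile_Task) and polling them until "completed successfully". Below is a minimal, hypothetical sketch of that polling pattern; the names get_task_info and TaskFailed are illustrative stand-ins, not the real oslo.vmware API, which wraps this in VMwareAPISession.wait_for_task()/_poll_task as the log paths indicate.

import time


class TaskFailed(Exception):
    """Raised when the vCenter task reports an error state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state.

    get_task_info: callable returning an object with .state, .progress and
    .error, assumed to wrap a PropertyCollector read of the Task object.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info
        if info.state == "error":
            # Mirrors the translate_fault() step visible in the tracebacks
            # elsewhere in this log: a failed task surfaces as an exception.
            raise TaskFailed(info.error)
        # Matches the "Task: {...} progress is N%" debug entries above.
        print(f"Task {task_ref}: progress is {info.progress or 0}%")
        time.sleep(poll_interval)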
[ 2264.848126] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-226d7de2-5ffb-4d9e-99f3-65238a43e433 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.849917] env[68282]: DEBUG nova.compute.claims [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2264.850117] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.850335] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.872150] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2264.924700] env[68282]: DEBUG oslo_vmware.rw_handles [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2264.986618] env[68282]: DEBUG oslo_vmware.rw_handles [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2264.986824] env[68282]: DEBUG oslo_vmware.rw_handles [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2265.025230] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98316d94-1876-4796-b6bf-c26a177b6800 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.034202] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86dc733-a7ae-4765-8dd5-825c7cfb3a62 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.063560] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b58b75-0d63-45b8-bde9-80284ae6f921 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.070200] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed65ac2-e15a-4cd0-8ad7-44b4d5f96fa0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.083254] env[68282]: DEBUG nova.compute.provider_tree [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2265.092259] env[68282]: DEBUG nova.scheduler.client.report [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2265.105124] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.255s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.105660] env[68282]: ERROR nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2265.105660] env[68282]: Faults: ['InvalidArgument'] [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Traceback (most recent call last): [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2265.105660] env[68282]: ERROR nova.compute.manager 
[instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] self.driver.spawn(context, instance, image_meta, [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] self._fetch_image_if_missing(context, vi) [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] image_cache(vi, tmp_image_ds_loc) [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] vm_util.copy_virtual_disk( [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] session._wait_for_task(vmdk_copy_task) [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] return self.wait_for_task(task_ref) [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] return evt.wait() [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] result = hub.switch() [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] return self.greenlet.switch() [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] self.f(*self.args, **self.kw) [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] raise exceptions.translate_fault(task_info.error) [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Faults: ['InvalidArgument'] [ 2265.105660] env[68282]: ERROR nova.compute.manager [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] [ 2265.106831] env[68282]: DEBUG nova.compute.utils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2265.107882] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Build of instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 was re-scheduled: A specified parameter was not correct: fileType [ 2265.107882] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2265.108273] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2265.108450] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2265.108628] env[68282]: DEBUG nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2265.108795] env[68282]: DEBUG nova.network.neutron [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2265.391985] env[68282]: DEBUG nova.network.neutron [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2265.405556] env[68282]: INFO nova.compute.manager [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Took 0.30 seconds to deallocate network for instance. [ 2265.502754] env[68282]: INFO nova.scheduler.client.report [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Deleted allocations for instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 [ 2265.528798] env[68282]: DEBUG oslo_concurrency.lockutils [None req-94eb43ba-cc5a-4352-b860-d65080457798 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 490.806s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.529094] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 371.927s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2265.529294] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] During sync_power_state the instance has a pending task (spawning). Skip. 
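The lockutils entries above report how long each caller waited for and then held a named lock (e.g. "acquired ... waited 371.927s", "released ... held 490.806s"). The following is a minimal single-process sketch, under stated assumptions, of that timing bookkeeping; it is illustration only and not the real oslo_concurrency implementation, which also supports external file-based and fair locks.

import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name, caller):
    """Acquire a named in-process lock, logging waited/held durations."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

A caller such as the resource tracker would use it as: with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"): ... (hypothetical usage matching the message format above).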
[ 2265.529472] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.530141] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 294.294s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2265.530384] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Acquiring lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2265.530476] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2265.530649] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2265.532736] env[68282]: INFO nova.compute.manager [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Terminating instance [ 2265.534596] env[68282]: DEBUG nova.compute.manager [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2265.534830] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2265.535419] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-225d5370-d969-49de-8ec6-5da63ce24ab7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.544221] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd438b07-f281-48e6-a350-0ca5471d8e49 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.570813] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e8d1996c-f4aa-4c18-9a68-7da964aa1ab8 could not be found. [ 2265.571030] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2265.571217] env[68282]: INFO nova.compute.manager [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2265.571461] env[68282]: DEBUG oslo.service.loopingcall [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2265.571672] env[68282]: DEBUG nova.compute.manager [-] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2265.571767] env[68282]: DEBUG nova.network.neutron [-] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2265.593481] env[68282]: DEBUG nova.network.neutron [-] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2265.600923] env[68282]: INFO nova.compute.manager [-] [instance: e8d1996c-f4aa-4c18-9a68-7da964aa1ab8] Took 0.03 seconds to deallocate network for instance. 
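In the terminate sequence above, the backend lookup raises InstanceNotFound, the driver logs a warning ("Instance does not exist on backend") and still reports "Instance destroyed", so a missing VM is treated as already gone rather than as a failure. A hedged sketch of that idea follows; InstanceNotFound, lookup_vm_ref and unregister_vm are stand-ins for illustration, not Nova's actual vmops internals.

class InstanceNotFound(Exception):
    """Stand-in for the driver's 'VM not found on backend' condition."""


def destroy_instance(lookup_vm_ref, unregister_vm, instance_uuid):
    """Destroy a VM if it exists; a missing VM is logged and treated as done."""
    try:
        vm_ref = lookup_vm_ref(instance_uuid)
    except InstanceNotFound:
        # Matches the WARNING above: nothing left to unregister or delete.
        print(f"WARNING: Instance {instance_uuid} could not be found on backend.")
        return
    unregister_vm(vm_ref)
    print(f"Instance destroyed: {instance_uuid}")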
[ 2265.687295] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9a82f36b-3fee-4242-a499-3ffdd9b98420 tempest-AttachVolumeTestJSON-645852486 tempest-AttachVolumeTestJSON-645852486-project-member] Lock "e8d1996c-f4aa-4c18-9a68-7da964aa1ab8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.157s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2293.087625] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.087919] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2293.087919] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2293.100554] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2293.100769] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2293.100942] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2293.101115] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2293.102301] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60751ec-ee2d-447e-b2e9-f3ac33ea3bed {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.111127] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc80b60-88d5-470d-a775-af8f1c19bfec {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.124846] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47d53d9-2718-4729-8c39-f3d25db5713a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.131324] env[68282]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88135718-1b17-4b7d-a9ac-44f25e916358 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.161837] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180941MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2293.161837] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2293.161996] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2293.219017] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2293.219196] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2293.219333] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 14e97724-1044-4f32-ac27-8062120c2c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2293.219459] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 41e28779-65ad-476c-bc9c-9747beba2813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2293.219578] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2c38a690-608e-4531-aeb3-629eb5c09532 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2293.219765] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2293.219906] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2293.293522] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27105062-ca4f-4947-bc73-f03bb664efb2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.302344] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c5b02c-94c8-42f1-85f9-a87d952101be {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.332804] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fce603-f028-4972-9b65-52084cabf141 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.340136] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b41dc09-c4c1-43fb-8b20-db13c87dbc71 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.353398] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2293.362569] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2293.376964] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2293.377197] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.215s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.377584] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2295.377855] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2295.377894] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2295.392973] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2295.393145] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2295.393285] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2295.393410] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2295.393531] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2295.393652] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2297.087820] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2297.087820] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2298.088060] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.087444] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.087726] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2302.083819] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2311.234926] env[68282]: WARNING oslo_vmware.rw_handles [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2311.234926] env[68282]: ERROR oslo_vmware.rw_handles [ 2311.235580] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 
tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2311.237094] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2311.237350] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Copying Virtual Disk [datastore2] vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/3805101b-ac53-469b-b3dd-52e437f6f2a7/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2311.237639] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f359fba3-276e-4d65-9905-b522f6f65dfc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.245915] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Waiting for the task: (returnval){ [ 2311.245915] env[68282]: value = "task-3470640" [ 2311.245915] env[68282]: _type = "Task" [ 2311.245915] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.254175] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Task: {'id': task-3470640, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.665090] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquiring lock "2d391110-46da-475d-b324-d4bb6e13b4fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2311.665090] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Lock "2d391110-46da-475d-b324-d4bb6e13b4fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2311.679702] env[68282]: DEBUG nova.compute.manager [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2311.732012] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2311.732354] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2311.734159] env[68282]: INFO nova.compute.claims [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2311.758698] env[68282]: DEBUG oslo_vmware.exceptions [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2311.758979] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2311.759669] env[68282]: ERROR nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2311.759669] env[68282]: Faults: ['InvalidArgument'] [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Traceback (most recent call last): [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] yield resources [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] self.driver.spawn(context, instance, image_meta, [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] self._fetch_image_if_missing(context, vi) [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] image_cache(vi, tmp_image_ds_loc) [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] vm_util.copy_virtual_disk( [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] session._wait_for_task(vmdk_copy_task) [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] return self.wait_for_task(task_ref) [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] return evt.wait() [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] result = hub.switch() [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] return self.greenlet.switch() [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] self.f(*self.args, **self.kw) [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] raise exceptions.translate_fault(task_info.error) [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Faults: ['InvalidArgument'] [ 2311.759669] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] [ 2311.760509] env[68282]: INFO nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Terminating instance [ 2311.761587] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2311.761587] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2311.762043] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock 
"refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2311.762204] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquired lock "refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2311.762370] env[68282]: DEBUG nova.network.neutron [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2311.763266] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7389b4fa-f0ab-48e0-a253-258bad8b002f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.772762] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2311.772942] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2311.774428] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88237f4c-d877-430a-8d84-0c9da32d0606 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.782491] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 2311.782491] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52a91bf3-72ef-5f06-129c-5e1db2016160" [ 2311.782491] env[68282]: _type = "Task" [ 2311.782491] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.789854] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52a91bf3-72ef-5f06-129c-5e1db2016160, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.881319] env[68282]: DEBUG nova.network.neutron [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2311.900756] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c0e6997-dd46-4dfe-b143-607db75aa163 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.908053] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2bf83b-c64b-48b2-8573-422e1c121177 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.941012] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf371de-d361-47c5-94de-6dfe390e6cb6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.948447] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0829c4-c8c8-42f9-a307-e5e70ff59b34 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.952870] env[68282]: DEBUG nova.network.neutron [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2311.963289] env[68282]: DEBUG nova.compute.provider_tree [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2311.965018] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Releasing lock "refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2311.965490] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Start destroying the instance on the hypervisor. 
{{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2311.965685] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2311.966792] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2f3251-eb85-4746-bf2f-53ef4e992583 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.975020] env[68282]: DEBUG nova.scheduler.client.report [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2311.976922] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2311.977282] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adb149f2-fcd9-4143-9e98-c21ac66aa4ea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.984899] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.253s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.000631] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquiring lock "620b3e29-f22c-40f4-bf45-8240ecf012ad" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.000888] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Lock "620b3e29-f22c-40f4-bf45-8240ecf012ad" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.007191] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 
tempest-ServerGroupTestJSON-534504489-project-member] Lock "620b3e29-f22c-40f4-bf45-8240ecf012ad" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.006s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.007635] env[68282]: DEBUG nova.compute.manager [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2312.010558] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2312.010749] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2312.010925] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Deleting the datastore file [datastore2] 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2312.011360] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7e2f34f-5dcf-47bd-a43d-a946a0bcfe58 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.018339] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Waiting for the task: (returnval){ [ 2312.018339] env[68282]: value = "task-3470642" [ 2312.018339] env[68282]: _type = "Task" [ 2312.018339] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2312.026299] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Task: {'id': task-3470642, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2312.042379] env[68282]: DEBUG nova.compute.utils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2312.043784] env[68282]: DEBUG nova.compute.manager [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Allocating IP information in the background. 
{{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2312.043974] env[68282]: DEBUG nova.network.neutron [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2312.054260] env[68282]: DEBUG nova.compute.manager [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2312.097031] env[68282]: DEBUG nova.policy [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1e41a0033964aef9ba8915da1318cc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '967c5630b65447bba9ab0783fd45e8f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 2312.122045] env[68282]: DEBUG nova.compute.manager [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2312.149177] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2312.149438] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2312.149604] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2312.149787] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2312.149935] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2312.150103] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2312.150318] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2312.150480] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2312.150648] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c 
tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2312.150813] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2312.150991] env[68282]: DEBUG nova.virt.hardware [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2312.151914] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e845b821-4409-450d-8ad8-5b060c19750c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.160079] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c59971-24ee-4bcb-8143-808f1d41040f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.292330] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2312.292623] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating directory with path [datastore2] vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2312.292838] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc17598f-881e-4959-8409-4d2389fc2465 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.304125] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Created directory with path [datastore2] vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2312.304331] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Fetch image to [datastore2] vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2312.304504] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Downloading image 
file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2312.305268] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0baf69aa-98ad-4f23-9c35-488f3aabefcd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.311974] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873cf2b1-1d65-4c76-9c79-b0f87fa062ac {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.322692] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055312ca-56c1-4720-94f0-6c5d39f5a46a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.351489] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da65927-2480-4841-a53e-8b0f1bd7c4b2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.357512] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6ef0f57c-d122-4806-a066-db8f43655f73 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.382136] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2312.410240] env[68282]: DEBUG nova.network.neutron [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Successfully created port: b487dbb4-8ed9-470d-a761-b7f41f4a04c6 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2312.448875] env[68282]: DEBUG oslo_vmware.rw_handles [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2312.512444] env[68282]: DEBUG oslo_vmware.rw_handles [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Completed reading data from the image iterator. 
{{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2312.512661] env[68282]: DEBUG oslo_vmware.rw_handles [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2312.528591] env[68282]: DEBUG oslo_vmware.api [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Task: {'id': task-3470642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.052174} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.528858] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2312.529079] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2312.529280] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2312.529474] env[68282]: INFO nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Took 0.56 seconds to destroy the instance on the hypervisor. [ 2312.529731] env[68282]: DEBUG oslo.service.loopingcall [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2312.529939] env[68282]: DEBUG nova.compute.manager [-] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2312.532369] env[68282]: DEBUG nova.compute.claims [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2312.532542] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.532954] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.660248] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45cdce4-c51c-4134-be42-5cfa8a057e61 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.669104] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5d9b31-91e3-4cae-93fc-ba563448ef53 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.701532] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a8fd9b-f210-4f0f-813a-5754f2d0bbf1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.708728] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bb2044-a73a-4805-b70e-f17d08b86982 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.723161] env[68282]: DEBUG nova.compute.provider_tree [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2312.732048] env[68282]: DEBUG nova.scheduler.client.report [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2312.745629] env[68282]: DEBUG oslo_concurrency.lockutils [None 
req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.213s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.746183] env[68282]: ERROR nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2312.746183] env[68282]: Faults: ['InvalidArgument'] [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Traceback (most recent call last): [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] self.driver.spawn(context, instance, image_meta, [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] self._fetch_image_if_missing(context, vi) [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] image_cache(vi, tmp_image_ds_loc) [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] vm_util.copy_virtual_disk( [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] session._wait_for_task(vmdk_copy_task) [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] return self.wait_for_task(task_ref) [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] return 
evt.wait() [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] result = hub.switch() [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] return self.greenlet.switch() [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] self.f(*self.args, **self.kw) [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] raise exceptions.translate_fault(task_info.error) [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Faults: ['InvalidArgument'] [ 2312.746183] env[68282]: ERROR nova.compute.manager [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] [ 2312.746939] env[68282]: DEBUG nova.compute.utils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2312.748377] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Build of instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 was re-scheduled: A specified parameter was not correct: fileType [ 2312.748377] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2312.748733] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2312.748978] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2312.749150] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 
tempest-ServerShowV254Test-1108404182-project-member] Acquired lock "refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2312.749303] env[68282]: DEBUG nova.network.neutron [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2312.775349] env[68282]: DEBUG nova.network.neutron [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2312.894065] env[68282]: DEBUG nova.network.neutron [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2312.904213] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Releasing lock "refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2312.904667] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2312.904996] env[68282]: DEBUG nova.compute.manager [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2312.954892] env[68282]: DEBUG nova.compute.manager [req-2c022ea0-668c-4165-8162-5e5f8c66772e req-85bf821a-94e1-4816-b557-dea08e3f063a service nova] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Received event network-vif-plugged-b487dbb4-8ed9-470d-a761-b7f41f4a04c6 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 2312.955137] env[68282]: DEBUG oslo_concurrency.lockutils [req-2c022ea0-668c-4165-8162-5e5f8c66772e req-85bf821a-94e1-4816-b557-dea08e3f063a service nova] Acquiring lock "2d391110-46da-475d-b324-d4bb6e13b4fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2312.955352] env[68282]: DEBUG oslo_concurrency.lockutils [req-2c022ea0-668c-4165-8162-5e5f8c66772e req-85bf821a-94e1-4816-b557-dea08e3f063a service nova] Lock "2d391110-46da-475d-b324-d4bb6e13b4fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2312.955575] env[68282]: DEBUG oslo_concurrency.lockutils [req-2c022ea0-668c-4165-8162-5e5f8c66772e req-85bf821a-94e1-4816-b557-dea08e3f063a service nova] Lock "2d391110-46da-475d-b324-d4bb6e13b4fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2312.955768] env[68282]: DEBUG nova.compute.manager [req-2c022ea0-668c-4165-8162-5e5f8c66772e req-85bf821a-94e1-4816-b557-dea08e3f063a service nova] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] No waiting events found dispatching network-vif-plugged-b487dbb4-8ed9-470d-a761-b7f41f4a04c6 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2312.955937] env[68282]: WARNING nova.compute.manager [req-2c022ea0-668c-4165-8162-5e5f8c66772e req-85bf821a-94e1-4816-b557-dea08e3f063a service nova] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Received unexpected event network-vif-plugged-b487dbb4-8ed9-470d-a761-b7f41f4a04c6 for instance with vm_state building and task_state spawning. 
[ 2312.997065] env[68282]: INFO nova.scheduler.client.report [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Deleted allocations for instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 [ 2313.018020] env[68282]: DEBUG oslo_concurrency.lockutils [None req-600ca59d-6728-4ca3-a390-80be02f3a8ca tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 528.380s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.018188] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 419.415s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2313.018925] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] During sync_power_state the instance has a pending task (spawning). Skip. [ 2313.018925] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.018925] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 331.857s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2313.019186] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2313.019244] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2313.019730] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.021931] env[68282]: INFO nova.compute.manager [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Terminating instance [ 2313.022886] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquiring lock "refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2313.023543] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Acquired lock "refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2313.023543] env[68282]: DEBUG nova.network.neutron [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2313.038385] env[68282]: DEBUG nova.network.neutron [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Successfully updated port: b487dbb4-8ed9-470d-a761-b7f41f4a04c6 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2313.048153] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquiring lock "refresh_cache-2d391110-46da-475d-b324-d4bb6e13b4fa" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2313.048297] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquired lock "refresh_cache-2d391110-46da-475d-b324-d4bb6e13b4fa" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2313.048442] env[68282]: DEBUG nova.network.neutron [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2313.053591] env[68282]: DEBUG nova.network.neutron [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2313.092418] env[68282]: DEBUG nova.network.neutron [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2313.163698] env[68282]: DEBUG nova.network.neutron [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2313.173677] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Releasing lock "refresh_cache-8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2313.174112] env[68282]: DEBUG nova.compute.manager [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2313.174345] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2313.174911] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-927e7002-3f46-47f3-a956-4f6449de8519 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.184608] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ed1c2e-ab8c-4dfa-8d0e-c8ee27914dcc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.211538] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9 could not be found. [ 2313.211746] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2313.211928] env[68282]: INFO nova.compute.manager [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2313.212190] env[68282]: DEBUG oslo.service.loopingcall [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2313.214547] env[68282]: DEBUG nova.compute.manager [-] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2313.214648] env[68282]: DEBUG nova.network.neutron [-] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2313.231394] env[68282]: DEBUG nova.network.neutron [-] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Instance cache missing network info. {{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2313.238827] env[68282]: DEBUG nova.network.neutron [-] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2313.246627] env[68282]: INFO nova.compute.manager [-] [instance: 8ec6d449-57b1-40fb-9df9-ef1c3b69cde9] Took 0.03 seconds to deallocate network for instance. [ 2313.259361] env[68282]: DEBUG nova.network.neutron [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Updating instance_info_cache with network_info: [{"id": "b487dbb4-8ed9-470d-a761-b7f41f4a04c6", "address": "fa:16:3e:56:21:63", "network": {"id": "30d27ef3-bf9c-49b3-9d47-f54c2e695d8a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1562031578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "967c5630b65447bba9ab0783fd45e8f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb487dbb4-8e", "ovs_interfaceid": "b487dbb4-8ed9-470d-a761-b7f41f4a04c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2313.270674] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Releasing lock "refresh_cache-2d391110-46da-475d-b324-d4bb6e13b4fa" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2313.270957] env[68282]: DEBUG nova.compute.manager [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c 
tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Instance network_info: |[{"id": "b487dbb4-8ed9-470d-a761-b7f41f4a04c6", "address": "fa:16:3e:56:21:63", "network": {"id": "30d27ef3-bf9c-49b3-9d47-f54c2e695d8a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1562031578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "967c5630b65447bba9ab0783fd45e8f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb487dbb4-8e", "ovs_interfaceid": "b487dbb4-8ed9-470d-a761-b7f41f4a04c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2313.271386] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:21:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '683a619f-b10d-41a3-8c03-4f69f6c9ce53', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b487dbb4-8ed9-470d-a761-b7f41f4a04c6', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2313.279281] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Creating folder: Project (967c5630b65447bba9ab0783fd45e8f3). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2313.280112] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ea1357d-32a8-4a94-89a8-6af39330496e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.296923] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Created folder: Project (967c5630b65447bba9ab0783fd45e8f3) in parent group-v693573. [ 2313.297145] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Creating folder: Instances. Parent ref: group-v693681. 
{{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2313.297504] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82c9f536-7b3c-4c88-84a0-9f20b07b3141 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.305941] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Created folder: Instances in parent group-v693681. [ 2313.306182] env[68282]: DEBUG oslo.service.loopingcall [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2313.306365] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2313.306606] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41afb4ea-83f4-4e75-a54a-6306b8404dab {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.324926] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2313.324926] env[68282]: value = "task-3470645" [ 2313.324926] env[68282]: _type = "Task" [ 2313.324926] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.331872] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470645, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.334937] env[68282]: DEBUG oslo_concurrency.lockutils [None req-d49b00e5-3cfc-4cff-840a-18f1bf5f8c75 tempest-ServerShowV254Test-1108404182 tempest-ServerShowV254Test-1108404182-project-member] Lock "8ec6d449-57b1-40fb-9df9-ef1c3b69cde9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.316s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.835180] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470645, 'name': CreateVM_Task, 'duration_secs': 0.28216} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2313.835401] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2313.836102] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2313.836313] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2313.836638] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2313.836908] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d84b068a-e462-45c4-a026-0ced87ca0510 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.841244] env[68282]: DEBUG oslo_vmware.api [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Waiting for the task: (returnval){ [ 2313.841244] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52f307a8-17df-fb4c-8c30-d805da1a7513" [ 2313.841244] env[68282]: _type = "Task" [ 2313.841244] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.848563] env[68282]: DEBUG oslo_vmware.api [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52f307a8-17df-fb4c-8c30-d805da1a7513, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.151332] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "2c38a690-608e-4531-aeb3-629eb5c09532" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2314.352621] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2314.352621] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2314.352621] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2314.984173] env[68282]: DEBUG nova.compute.manager [req-c0870731-65f0-46fd-a0b8-b898485c57f5 req-89e3848e-6175-4768-960e-7237797d216c service nova] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Received event network-changed-b487dbb4-8ed9-470d-a761-b7f41f4a04c6 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 2314.984355] env[68282]: DEBUG nova.compute.manager [req-c0870731-65f0-46fd-a0b8-b898485c57f5 req-89e3848e-6175-4768-960e-7237797d216c service nova] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Refreshing instance network info cache due to event network-changed-b487dbb4-8ed9-470d-a761-b7f41f4a04c6. 
{{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 2314.984582] env[68282]: DEBUG oslo_concurrency.lockutils [req-c0870731-65f0-46fd-a0b8-b898485c57f5 req-89e3848e-6175-4768-960e-7237797d216c service nova] Acquiring lock "refresh_cache-2d391110-46da-475d-b324-d4bb6e13b4fa" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2314.984726] env[68282]: DEBUG oslo_concurrency.lockutils [req-c0870731-65f0-46fd-a0b8-b898485c57f5 req-89e3848e-6175-4768-960e-7237797d216c service nova] Acquired lock "refresh_cache-2d391110-46da-475d-b324-d4bb6e13b4fa" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2314.984885] env[68282]: DEBUG nova.network.neutron [req-c0870731-65f0-46fd-a0b8-b898485c57f5 req-89e3848e-6175-4768-960e-7237797d216c service nova] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Refreshing network info cache for port b487dbb4-8ed9-470d-a761-b7f41f4a04c6 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2315.389098] env[68282]: DEBUG nova.network.neutron [req-c0870731-65f0-46fd-a0b8-b898485c57f5 req-89e3848e-6175-4768-960e-7237797d216c service nova] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Updated VIF entry in instance network info cache for port b487dbb4-8ed9-470d-a761-b7f41f4a04c6. {{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2315.389499] env[68282]: DEBUG nova.network.neutron [req-c0870731-65f0-46fd-a0b8-b898485c57f5 req-89e3848e-6175-4768-960e-7237797d216c service nova] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Updating instance_info_cache with network_info: [{"id": "b487dbb4-8ed9-470d-a761-b7f41f4a04c6", "address": "fa:16:3e:56:21:63", "network": {"id": "30d27ef3-bf9c-49b3-9d47-f54c2e695d8a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1562031578-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "967c5630b65447bba9ab0783fd45e8f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "683a619f-b10d-41a3-8c03-4f69f6c9ce53", "external-id": "nsx-vlan-transportzone-898", "segmentation_id": 898, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb487dbb4-8e", "ovs_interfaceid": "b487dbb4-8ed9-470d-a761-b7f41f4a04c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2315.398644] env[68282]: DEBUG oslo_concurrency.lockutils [req-c0870731-65f0-46fd-a0b8-b898485c57f5 req-89e3848e-6175-4768-960e-7237797d216c service nova] Releasing lock "refresh_cache-2d391110-46da-475d-b324-d4bb6e13b4fa" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2328.500340] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Acquiring lock 
"6452c865-064a-4eaf-9689-e7d056d370df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.500665] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Lock "6452c865-064a-4eaf-9689-e7d056d370df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.511093] env[68282]: DEBUG nova.compute.manager [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2328.562177] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.562440] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.563914] env[68282]: INFO nova.compute.claims [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2328.682743] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e75b298-f192-43c6-b48e-5a682929b64d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.690226] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d1eafa-514a-4921-bfae-310c83dd962e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.721296] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f47cfe-0fed-47b8-99a7-33b806d5bd3d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.728622] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92ef383-50e5-44d7-afc6-09f2bc37fd61 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.741076] env[68282]: DEBUG nova.compute.provider_tree [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Inventory has not changed in ProviderTree for 
provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2328.749741] env[68282]: DEBUG nova.scheduler.client.report [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2328.764867] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.202s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2328.765338] env[68282]: DEBUG nova.compute.manager [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2328.794578] env[68282]: DEBUG nova.compute.utils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2328.795701] env[68282]: DEBUG nova.compute.manager [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Not allocating networking since 'none' was specified. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 2328.803404] env[68282]: DEBUG nova.compute.manager [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2328.861012] env[68282]: DEBUG nova.compute.manager [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2328.885658] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2328.885906] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2328.886082] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2328.886273] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2328.886428] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2328.886580] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2328.886788] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2328.886952] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2328.887135] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 
tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2328.887303] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2328.887481] env[68282]: DEBUG nova.virt.hardware [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2328.888332] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4495d6a4-d963-47eb-8038-4d1090139273 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.895774] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a02deb-064a-41cf-9b20-de147913f91f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.908749] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Instance VIF info [] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2328.914124] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Creating folder: Project (c4a9f9f9be404c029f1428a427801268). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2328.914364] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37acad12-77df-4980-8638-f31dc981eb33 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.923449] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Created folder: Project (c4a9f9f9be404c029f1428a427801268) in parent group-v693573. [ 2328.923630] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Creating folder: Instances. Parent ref: group-v693684. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2328.923822] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5eb5174-65b2-4706-ad3d-9c6c6fec0caf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.931686] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Created folder: Instances in parent group-v693684. 
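[editor's note] The records above and below show the vmwareapi driver's spawn path: Folder.CreateFolder for the project/Instances folders, then Folder.CreateVM_Task, which oslo.vmware polls until vCenter reports completion ("progress is 0%" ... "completed successfully"). The following is a minimal Python sketch of that invoke-and-poll pattern, not Nova's actual vm_util code; it assumes an already-connected oslo_vmware session, and the folder/resource-pool morefs and config spec are placeholders, not values from this log.

    # Sketch of the invoke/poll pattern behind the CreateVM_Task records.
    from oslo_vmware import api as vmware_api  # noqa: F401 (session factory, see usage note)
    from oslo_vmware import vim_util

    def create_vm(session, folder_moref_value, config_spec, respool_moref_value):
        """Issue CreateVM_Task and block until vCenter reports the task done."""
        folder_ref = vim_util.get_moref(folder_moref_value, 'Folder')
        pool_ref = vim_util.get_moref(respool_moref_value, 'ResourcePool')
        # invoke_api sends the SOAP request (the "Invoking Folder.CreateVM_Task"
        # lines); wait_for_task polls the task object on a fixed interval, which
        # is what produces the "progress is 0%" / "completed successfully" records.
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=pool_ref)
        task_info = session.wait_for_task(task)
        return task_info.result  # managed object reference of the new VM

    # Usage (connection parameters are illustrative only):
    # session = vmware_api.VMwareAPISession('vc.example.org', 'user', 'secret',
    #                                       api_retry_count=10,
    #                                       task_poll_interval=0.5)
    # vm_ref = create_vm(session, 'group-v123', my_config_spec, 'resgroup-9')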
[ 2328.931903] env[68282]: DEBUG oslo.service.loopingcall [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2328.932320] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2328.932510] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44dfa0c3-e211-46b7-8423-7d3d095ff8dd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.947902] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2328.947902] env[68282]: value = "task-3470648" [ 2328.947902] env[68282]: _type = "Task" [ 2328.947902] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.954785] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470648, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.457946] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470648, 'name': CreateVM_Task, 'duration_secs': 0.265126} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.458152] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2329.458647] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2329.458793] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2329.459247] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2329.459387] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88070896-f754-4214-b3c6-e06ca5b7ef15 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.463943] env[68282]: DEBUG oslo_vmware.api [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Waiting for the task: 
(returnval){ [ 2329.463943] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]521a5de7-beba-84d9-59cb-a9712b717b8c" [ 2329.463943] env[68282]: _type = "Task" [ 2329.463943] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.472915] env[68282]: DEBUG oslo_vmware.api [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]521a5de7-beba-84d9-59cb-a9712b717b8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.973881] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2329.974216] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2329.974333] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b10a50b4-b247-4ba3-916d-84ec7fd314c8 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2340.188104] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "f516baac-8c80-4223-8b68-3779bd785f0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2340.188391] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "f516baac-8c80-4223-8b68-3779bd785f0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2340.201093] env[68282]: DEBUG nova.compute.manager [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Starting instance... 
{{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2340.248087] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2340.248339] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2340.249772] env[68282]: INFO nova.compute.claims [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2340.378840] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4dc83d-1ed0-4e53-a105-664c16deae5f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.386874] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e0bf20-8251-4921-8bd2-5e71db56d8cb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.418413] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca84760-e68e-4342-805e-04c2a9352df2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.425618] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28abb8cb-ff5a-492d-b92b-9f4612065b43 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.438507] env[68282]: DEBUG nova.compute.provider_tree [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2340.447533] env[68282]: DEBUG nova.scheduler.client.report [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2340.462137] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 
tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.214s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2340.462621] env[68282]: DEBUG nova.compute.manager [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2340.494010] env[68282]: DEBUG nova.compute.utils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2340.495360] env[68282]: DEBUG nova.compute.manager [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Allocating IP information in the background. {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2340.495535] env[68282]: DEBUG nova.network.neutron [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2340.503054] env[68282]: DEBUG nova.compute.manager [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2340.557693] env[68282]: DEBUG nova.policy [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '586f6880a99449eeab1379280df867a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12975c11434b4530b1f38c1eceaa4e68', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 2340.563179] env[68282]: DEBUG nova.compute.manager [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2340.588488] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2340.588723] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2340.588900] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2340.589103] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2340.589261] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2340.589409] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2340.589621] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2340.589777] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2340.589950] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 
tempest-ServersTestJSON-115846852-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2340.590132] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2340.590316] env[68282]: DEBUG nova.virt.hardware [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2340.591202] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2615b5fa-6de3-4ea1-9925-1a29e689a44b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.599541] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060433a2-7089-4e18-a82d-30360c8e59db {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.847237] env[68282]: DEBUG nova.network.neutron [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Successfully created port: 2479d11f-3760-4137-aafd-2f49fdfd20e8 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2341.340784] env[68282]: DEBUG nova.compute.manager [req-d46f2a3f-07c3-452b-b1b9-68e2e71e43da req-02155e92-070a-458b-89be-addef7c25071 service nova] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Received event network-vif-plugged-2479d11f-3760-4137-aafd-2f49fdfd20e8 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 2341.341055] env[68282]: DEBUG oslo_concurrency.lockutils [req-d46f2a3f-07c3-452b-b1b9-68e2e71e43da req-02155e92-070a-458b-89be-addef7c25071 service nova] Acquiring lock "f516baac-8c80-4223-8b68-3779bd785f0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2341.341257] env[68282]: DEBUG oslo_concurrency.lockutils [req-d46f2a3f-07c3-452b-b1b9-68e2e71e43da req-02155e92-070a-458b-89be-addef7c25071 service nova] Lock "f516baac-8c80-4223-8b68-3779bd785f0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2341.341423] env[68282]: DEBUG oslo_concurrency.lockutils [req-d46f2a3f-07c3-452b-b1b9-68e2e71e43da req-02155e92-070a-458b-89be-addef7c25071 service nova] Lock "f516baac-8c80-4223-8b68-3779bd785f0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2341.341593] env[68282]: DEBUG nova.compute.manager [req-d46f2a3f-07c3-452b-b1b9-68e2e71e43da req-02155e92-070a-458b-89be-addef7c25071 service nova] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] No 
waiting events found dispatching network-vif-plugged-2479d11f-3760-4137-aafd-2f49fdfd20e8 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2341.341773] env[68282]: WARNING nova.compute.manager [req-d46f2a3f-07c3-452b-b1b9-68e2e71e43da req-02155e92-070a-458b-89be-addef7c25071 service nova] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Received unexpected event network-vif-plugged-2479d11f-3760-4137-aafd-2f49fdfd20e8 for instance with vm_state building and task_state spawning. [ 2341.424930] env[68282]: DEBUG nova.network.neutron [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Successfully updated port: 2479d11f-3760-4137-aafd-2f49fdfd20e8 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2341.438891] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "refresh_cache-f516baac-8c80-4223-8b68-3779bd785f0c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2341.439116] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired lock "refresh_cache-f516baac-8c80-4223-8b68-3779bd785f0c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2341.439319] env[68282]: DEBUG nova.network.neutron [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2341.480408] env[68282]: DEBUG nova.network.neutron [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2341.639718] env[68282]: DEBUG nova.network.neutron [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Updating instance_info_cache with network_info: [{"id": "2479d11f-3760-4137-aafd-2f49fdfd20e8", "address": "fa:16:3e:32:cd:fe", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2479d11f-37", "ovs_interfaceid": "2479d11f-3760-4137-aafd-2f49fdfd20e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2341.651924] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Releasing lock "refresh_cache-f516baac-8c80-4223-8b68-3779bd785f0c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2341.652471] env[68282]: DEBUG nova.compute.manager [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Instance network_info: |[{"id": "2479d11f-3760-4137-aafd-2f49fdfd20e8", "address": "fa:16:3e:32:cd:fe", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2479d11f-37", "ovs_interfaceid": "2479d11f-3760-4137-aafd-2f49fdfd20e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2341.652625] env[68282]: DEBUG 
nova.virt.vmwareapi.vmops [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:cd:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a485857d-7086-4dcf-9d65-d0dcd177fcb0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2479d11f-3760-4137-aafd-2f49fdfd20e8', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2341.660121] env[68282]: DEBUG oslo.service.loopingcall [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2341.660563] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2341.660906] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49b10863-64b9-4b03-a524-616b74a8ab6b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2341.680517] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2341.680517] env[68282]: value = "task-3470649" [ 2341.680517] env[68282]: _type = "Task" [ 2341.680517] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2341.688180] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470649, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2342.192032] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470649, 'name': CreateVM_Task, 'duration_secs': 0.275712} completed successfully. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2342.192032] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2342.192557] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2342.192725] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2342.193049] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2342.193296] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-919e0f48-7895-47a6-adb4-d73a06df855f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.197806] env[68282]: DEBUG oslo_vmware.api [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 2342.197806] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5280255d-a902-24a6-ad23-6294d1a2524c" [ 2342.197806] env[68282]: _type = "Task" [ 2342.197806] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2342.205270] env[68282]: DEBUG oslo_vmware.api [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]5280255d-a902-24a6-ad23-6294d1a2524c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2342.708628] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2342.708997] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2342.709122] env[68282]: DEBUG oslo_concurrency.lockutils [None req-9870b386-c6bf-4ef6-99a1-1356f6d8f3d9 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2343.372535] env[68282]: DEBUG nova.compute.manager [req-38be5848-dc28-4eb0-8a9b-cbe1000d1066 req-7cce7595-6117-48a0-b2e0-5a0478886dd0 service nova] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Received event network-changed-2479d11f-3760-4137-aafd-2f49fdfd20e8 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 2343.372753] env[68282]: DEBUG nova.compute.manager [req-38be5848-dc28-4eb0-8a9b-cbe1000d1066 req-7cce7595-6117-48a0-b2e0-5a0478886dd0 service nova] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Refreshing instance network info cache due to event network-changed-2479d11f-3760-4137-aafd-2f49fdfd20e8. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 2343.372969] env[68282]: DEBUG oslo_concurrency.lockutils [req-38be5848-dc28-4eb0-8a9b-cbe1000d1066 req-7cce7595-6117-48a0-b2e0-5a0478886dd0 service nova] Acquiring lock "refresh_cache-f516baac-8c80-4223-8b68-3779bd785f0c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2343.373128] env[68282]: DEBUG oslo_concurrency.lockutils [req-38be5848-dc28-4eb0-8a9b-cbe1000d1066 req-7cce7595-6117-48a0-b2e0-5a0478886dd0 service nova] Acquired lock "refresh_cache-f516baac-8c80-4223-8b68-3779bd785f0c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2343.373291] env[68282]: DEBUG nova.network.neutron [req-38be5848-dc28-4eb0-8a9b-cbe1000d1066 req-7cce7595-6117-48a0-b2e0-5a0478886dd0 service nova] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Refreshing network info cache for port 2479d11f-3760-4137-aafd-2f49fdfd20e8 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2343.606142] env[68282]: DEBUG nova.network.neutron [req-38be5848-dc28-4eb0-8a9b-cbe1000d1066 req-7cce7595-6117-48a0-b2e0-5a0478886dd0 service nova] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Updated VIF entry in instance network info cache for port 2479d11f-3760-4137-aafd-2f49fdfd20e8. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2343.606516] env[68282]: DEBUG nova.network.neutron [req-38be5848-dc28-4eb0-8a9b-cbe1000d1066 req-7cce7595-6117-48a0-b2e0-5a0478886dd0 service nova] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Updating instance_info_cache with network_info: [{"id": "2479d11f-3760-4137-aafd-2f49fdfd20e8", "address": "fa:16:3e:32:cd:fe", "network": {"id": "5d5fb3d6-81b7-45b5-bcd6-0551dc000864", "bridge": "br-int", "label": "tempest-ServersTestJSON-426535076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12975c11434b4530b1f38c1eceaa4e68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a485857d-7086-4dcf-9d65-d0dcd177fcb0", "external-id": "nsx-vlan-transportzone-232", "segmentation_id": 232, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2479d11f-37", "ovs_interfaceid": "2479d11f-3760-4137-aafd-2f49fdfd20e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2343.616053] env[68282]: DEBUG oslo_concurrency.lockutils [req-38be5848-dc28-4eb0-8a9b-cbe1000d1066 req-7cce7595-6117-48a0-b2e0-5a0478886dd0 service nova] Releasing lock "refresh_cache-f516baac-8c80-4223-8b68-3779bd785f0c" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2353.087671] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.088027] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2353.088085] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2353.100380] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2353.100610] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2353.100781] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2353.100936] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2353.102268] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d6e653-0c36-4735-930b-07c2b03f9357 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.111327] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60132f8-c606-4fd9-aafd-a42eef2c47a6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.125083] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb4eb33-1ada-4180-842d-2116c66c3865 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.131396] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4de6fa-e187-421e-853e-3093dba755ad {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.160867] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180922MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2353.161035] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2353.161232] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2353.227292] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2353.227462] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 14e97724-1044-4f32-ac27-8062120c2c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2353.227594] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 41e28779-65ad-476c-bc9c-9747beba2813 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2353.227720] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2c38a690-608e-4531-aeb3-629eb5c09532 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2353.227842] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2d391110-46da-475d-b324-d4bb6e13b4fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2353.227961] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6452c865-064a-4eaf-9689-e7d056d370df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2353.228095] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance f516baac-8c80-4223-8b68-3779bd785f0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2353.228281] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2353.228420] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2353.314563] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac44949-379f-4953-a1ac-d85d55c90a35 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.322291] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa258de6-5ea5-4e34-8dd1-c149b71d4822 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.351992] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9468a4-3cfb-457d-9994-d91468425010 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.358827] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf85edb-2e77-4c92-9919-31b82e671135 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.371292] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2353.379365] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2353.392784] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2353.392784] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.231s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2357.393060] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2357.393381] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2357.393381] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2357.409889] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2357.410066] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2357.410192] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2357.410333] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2357.410464] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2357.410587] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2357.410707] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2357.410827] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2359.086952] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.087342] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.087384] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2360.083536] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.086921] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.723148] env[68282]: WARNING oslo_vmware.rw_handles [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2361.723148] env[68282]: ERROR oslo_vmware.rw_handles [ 2361.723560] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2361.725399] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2361.725687] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Copying Virtual Disk [datastore2] vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/0e48f0c3-3c13-4ba7-89ee-d781d706c95d/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2361.725975] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7793ceb1-eba6-4324-8a2d-8d2382bca40a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.733069] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 2361.733069] env[68282]: value = "task-3470650" [ 2361.733069] env[68282]: _type = "Task" [ 2361.733069] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.740888] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.087901] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2362.243010] env[68282]: DEBUG oslo_vmware.exceptions [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2362.243331] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2362.243886] env[68282]: ERROR nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2362.243886] env[68282]: Faults: ['InvalidArgument'] [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Traceback (most recent call last): [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] yield resources [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] self.driver.spawn(context, instance, image_meta, [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] self._fetch_image_if_missing(context, vi) [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] image_cache(vi, tmp_image_ds_loc) [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] vm_util.copy_virtual_disk( [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] session._wait_for_task(vmdk_copy_task) [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] return self.wait_for_task(task_ref) [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] return evt.wait() [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] result = hub.switch() [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] return self.greenlet.switch() [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] self.f(*self.args, **self.kw) [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] raise exceptions.translate_fault(task_info.error) [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Faults: ['InvalidArgument'] [ 2362.243886] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] [ 2362.244806] env[68282]: INFO nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Terminating instance [ 2362.245820] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2362.246037] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2362.246282] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfdda8c6-6f16-42a0-b875-bb6835b35374 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.248388] 
env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2362.248582] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2362.249307] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa603ae-31c2-44ca-873d-84e54d01f672 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.255871] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2362.256125] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c36af422-fcea-496c-b034-2abc746c34e1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.258266] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2362.258477] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2362.259447] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea47aecd-a7e2-4aff-adfd-b67735079d6c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.264055] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 2362.264055] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]521578fd-fcc6-e242-2558-b9bd728f8f75" [ 2362.264055] env[68282]: _type = "Task" [ 2362.264055] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2362.272144] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]521578fd-fcc6-e242-2558-b9bd728f8f75, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.328227] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2362.328340] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2362.328529] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleting the datastore file [datastore2] e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2362.328813] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abfea94f-ff1c-47ed-8290-518f1e9aed29 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.334888] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for the task: (returnval){ [ 2362.334888] env[68282]: value = "task-3470652" [ 2362.334888] env[68282]: _type = "Task" [ 2362.334888] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2362.342518] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470652, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2362.773927] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2362.774243] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Creating directory with path [datastore2] vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2362.774483] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a2aee27-feab-44c3-ab3d-48057b0a433c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.785599] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Created directory with path [datastore2] vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2362.785788] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Fetch image to [datastore2] vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2362.785961] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2362.786686] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6409f72b-a493-4be5-99c9-b6355546e046 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.792920] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff6f75f-4b63-4cff-8327-7f8136b1535e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.801711] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c002c9f2-103e-4c66-b79d-6f0aef279981 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.832349] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542f20cc-2d5a-4406-8298-552c830f2ddc {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.840192] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0d5efde7-7e0b-4ff3-aed4-361122de6f30 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.844268] env[68282]: DEBUG oslo_vmware.api [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Task: {'id': task-3470652, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074083} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2362.844768] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2362.844951] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2362.845143] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2362.845339] env[68282]: INFO nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Took 0.60 seconds to destroy the instance on the hypervisor. 
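(Editorial sketch.) The entries around this point all follow the same task-polling pattern: each vCenter operation (SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) returns a task reference, wait_for_task polls it ("progress is 0%" ... "completed successfully"), and if the task finishes in the error state the fault is raised back into the caller, which is how the "A specified parameter was not correct: fileType / InvalidArgument" fault surfaces in the spawn tracebacks above and below. A minimal, self-contained sketch of that polling loop follows; the TaskInfo type, TaskFailed exception, and interval/timeout values are illustrative stand-ins, not the real oslo.vmware classes or defaults.

    # Illustrative sketch of the poll-until-done pattern visible in these
    # log entries (wait_for_task / _poll_task). The types below are
    # stand-ins, not the actual oslo.vmware API.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str               # "running", "success", or "error"
        progress: int = 0        # percent complete, as in "progress is 0%"
        error: str | None = None

    class TaskFailed(Exception):
        """Raised when the polled task finishes in the error state."""

    def wait_for_task(fetch_info, interval=0.5, timeout=300.0):
        """Poll fetch_info() until the task succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # Mirrors _poll_task raising the translated fault, e.g.
                # "A specified parameter was not correct: fileType".
                raise TaskFailed(info.error)
            time.sleep(interval)
        raise TimeoutError("task did not complete within %.0fs" % timeout)

In this log every CopyVirtualDisk_Task issued while caching image 658717f1-7b98-47ed-bf66-8ef1a68a7047 fails at exactly this raise point with InvalidArgument on fileType, which is why each affected instance's spawn is aborted and re-scheduled.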
[ 2362.847510] env[68282]: DEBUG nova.compute.claims [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2362.847686] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.847894] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.861064] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2362.914500] env[68282]: DEBUG oslo_vmware.rw_handles [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2362.978323] env[68282]: DEBUG oslo_vmware.rw_handles [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2362.978592] env[68282]: DEBUG oslo_vmware.rw_handles [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2363.031132] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ace8c55-abd4-4ac5-8163-c0bc1356827c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.038276] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9d814f-e2ed-4173-98d7-840c73c61149 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.067870] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259b02d1-a0c1-4eb6-866b-d04cb68d73ea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.074992] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e69958-8839-4db1-9b18-5a85f5365566 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.088544] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.088966] env[68282]: DEBUG nova.compute.provider_tree [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2363.097814] env[68282]: DEBUG nova.scheduler.client.report [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2363.113146] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.265s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.113662] env[68282]: ERROR nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2363.113662] env[68282]: Faults: ['InvalidArgument'] [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Traceback (most recent 
call last): [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] self.driver.spawn(context, instance, image_meta, [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] self._fetch_image_if_missing(context, vi) [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] image_cache(vi, tmp_image_ds_loc) [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] vm_util.copy_virtual_disk( [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] session._wait_for_task(vmdk_copy_task) [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] return self.wait_for_task(task_ref) [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] return evt.wait() [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] result = hub.switch() [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] return self.greenlet.switch() [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2363.113662] env[68282]: ERROR 
nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] self.f(*self.args, **self.kw) [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] raise exceptions.translate_fault(task_info.error) [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Faults: ['InvalidArgument'] [ 2363.113662] env[68282]: ERROR nova.compute.manager [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] [ 2363.114496] env[68282]: DEBUG nova.compute.utils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2363.115801] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Build of instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 was re-scheduled: A specified parameter was not correct: fileType [ 2363.115801] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2363.116182] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2363.116359] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2363.116538] env[68282]: DEBUG nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2363.116701] env[68282]: DEBUG nova.network.neutron [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2363.425502] env[68282]: DEBUG nova.network.neutron [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2363.438164] env[68282]: INFO nova.compute.manager [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Took 0.32 seconds to deallocate network for instance. [ 2363.529859] env[68282]: INFO nova.scheduler.client.report [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Deleted allocations for instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 [ 2363.551282] env[68282]: DEBUG oslo_concurrency.lockutils [None req-4db9dbd0-3fa0-4cb5-a656-4aad22cb76d3 tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 449.561s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.551620] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 253.973s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.551853] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Acquiring lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2363.552073] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.552249] env[68282]: DEBUG 
oslo_concurrency.lockutils [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.554190] env[68282]: INFO nova.compute.manager [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Terminating instance [ 2363.557732] env[68282]: DEBUG nova.compute.manager [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2363.557939] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2363.558207] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dad0f900-5680-4dd6-92e3-e584a13daafb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.566849] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fc57d6-1293-49e9-84ff-f45ced76bd3d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.594746] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9f7b4b0-16b4-4621-962c-6ddb2f5c5544 could not be found. [ 2363.594948] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2363.595244] env[68282]: INFO nova.compute.manager [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2363.595540] env[68282]: DEBUG oslo.service.loopingcall [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2363.595808] env[68282]: DEBUG nova.compute.manager [-] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2363.595937] env[68282]: DEBUG nova.network.neutron [-] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2363.623916] env[68282]: DEBUG nova.network.neutron [-] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2363.633792] env[68282]: INFO nova.compute.manager [-] [instance: e9f7b4b0-16b4-4621-962c-6ddb2f5c5544] Took 0.04 seconds to deallocate network for instance. [ 2363.740179] env[68282]: DEBUG oslo_concurrency.lockutils [None req-aca232c9-1750-4fc1-ac50-f6024b7526ea tempest-ImagesTestJSON-637189425 tempest-ImagesTestJSON-637189425-project-member] Lock "e9f7b4b0-16b4-4621-962c-6ddb2f5c5544" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.188s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.788432] env[68282]: WARNING oslo_vmware.rw_handles [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2411.788432] env[68282]: ERROR oslo_vmware.rw_handles [ 2411.789151] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2411.791141] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b 
tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2411.791384] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Copying Virtual Disk [datastore2] vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/f7d5e25e-75d0-40cf-8217-13f465af73a3/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2411.791694] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99d6139b-bae3-4180-bc3c-b6b133e585e2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.800053] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 2411.800053] env[68282]: value = "task-3470653" [ 2411.800053] env[68282]: _type = "Task" [ 2411.800053] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2411.807936] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': task-3470653, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2412.310528] env[68282]: DEBUG oslo_vmware.exceptions [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2412.310845] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2412.311437] env[68282]: ERROR nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2412.311437] env[68282]: Faults: ['InvalidArgument'] [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Traceback (most recent call last): [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] yield resources [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] self.driver.spawn(context, instance, image_meta, [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] self._fetch_image_if_missing(context, vi) [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] image_cache(vi, tmp_image_ds_loc) [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] vm_util.copy_virtual_disk( [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] session._wait_for_task(vmdk_copy_task) [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] return self.wait_for_task(task_ref) [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] return evt.wait() [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] result = hub.switch() [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] return self.greenlet.switch() [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] self.f(*self.args, **self.kw) [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] raise exceptions.translate_fault(task_info.error) [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Faults: ['InvalidArgument'] [ 2412.311437] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] [ 2412.312227] env[68282]: INFO nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Terminating instance [ 2412.313392] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2412.313604] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2412.313843] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f27fc1b-1301-4b76-8617-1a1747f8f1e2 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.316109] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2412.316310] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2412.317042] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd13401-6a76-4fb0-8de2-cb3b049a618b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.323562] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2412.323769] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db9fd353-f8a8-4436-a305-d86c236b66b0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.325851] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2412.326067] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2412.326981] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3b60ae8-f557-4874-8bc5-dfc8999932cb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.331280] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for the task: (returnval){ [ 2412.331280] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]520de501-898d-2ba1-128c-41e8355f1fe5" [ 2412.331280] env[68282]: _type = "Task" [ 2412.331280] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2412.338382] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]520de501-898d-2ba1-128c-41e8355f1fe5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2412.395107] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2412.395288] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2412.395470] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Deleting the datastore file [datastore2] 41e28779-65ad-476c-bc9c-9747beba2813 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2412.395725] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da3b043b-6382-433b-9acb-70ab792c2695 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.402211] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for the task: (returnval){ [ 2412.402211] env[68282]: value = "task-3470655" [ 2412.402211] env[68282]: _type = "Task" [ 2412.402211] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2412.409466] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': task-3470655, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2412.841521] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2412.841940] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Creating directory with path [datastore2] vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2412.842090] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed48023c-8b87-4148-971d-764f596baf99 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.853594] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Created directory with path [datastore2] vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2412.853783] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Fetch image to [datastore2] vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2412.853947] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2412.854669] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2a2df3-fedb-44d6-8f97-a7ccf4f97a3a {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.860878] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98ccbac-b7c6-4ba6-9c84-aacfd397b8b8 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.869608] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6254e17d-e31f-45db-92a8-16a10be1903d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.900199] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da8ab8e-8b97-4a4a-8711-932c2b17bf68 
{{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.907080] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bd08d145-9aba-4bd8-9777-3cdec08fde70 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.911190] env[68282]: DEBUG oslo_vmware.api [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Task: {'id': task-3470655, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077114} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2412.911753] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2412.911998] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2412.912207] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2412.912390] env[68282]: INFO nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Took 0.60 seconds to destroy the instance on the hypervisor. 
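[editor's aside] The spawn-failure tracebacks in this log repeatedly exercise the same call chain: nova's vm_util.copy_virtual_disk() submits a CopyVirtualDisk_Task, the VMware session waits on it through oslo_vmware.api.wait_for_task(), and _poll_task() translates the vCenter fault ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) into a VimFaultException that nova.compute.manager logs and then re-schedules the build. The sketch below is a simplified, hypothetical illustration of that poll-and-translate pattern as it appears in the tracebacks, not the oslo.vmware source; the names get_task_info and poll_interval are assumptions made for the example.

# Hypothetical sketch of the task-polling pattern visible in the tracebacks
# (wait_for_task -> _poll_task -> translate_fault). Not the real oslo.vmware code.
import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds, raising on error.

    get_task_info is an assumed callable returning an object with
    ``state``, ``progress`` and ``error`` attributes, mirroring the task
    info that the log's _poll_task reads via the PropertyCollector.
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # In the log this surfaces as:
            #   VimFaultException: A specified parameter was not correct: fileType
            #   Faults: ['InvalidArgument']
            raise VimFaultException(info.error.faults, info.error.message)
        # Progress is logged at each poll (e.g. "CopyVirtualDisk_Task} progress is 0%").
        time.sleep(poll_interval)

When the exception propagates, _build_and_run_instance aborts the resource claim and re-schedules the build, which is why each failed CopyVirtualDisk_Task above is followed by "Aborting claim", "Deleted allocations" and a fresh fetch of the image to a new vmware_temp directory. [end aside]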
[ 2412.914467] env[68282]: DEBUG nova.compute.claims [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2412.914674] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2412.914907] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2412.934353] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2412.986918] env[68282]: DEBUG oslo_vmware.rw_handles [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2413.048557] env[68282]: DEBUG oslo_vmware.rw_handles [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2413.048763] env[68282]: DEBUG oslo_vmware.rw_handles [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2413.088266] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e7b18c-7ef2-446a-bf84-b6e27a7bc7d6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.095791] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e3859f-626d-49c6-9894-a591d6c8d15f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.126491] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f94aea-dea2-4e0c-aaf9-d86e331f4ea9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.133505] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a7f152-e9db-4570-80d5-5bbf019bed8c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.146626] env[68282]: DEBUG nova.compute.provider_tree [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2413.155276] env[68282]: DEBUG nova.scheduler.client.report [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2413.169752] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.255s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.170290] env[68282]: ERROR nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2413.170290] env[68282]: Faults: ['InvalidArgument'] [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Traceback (most recent call last): [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 
41e28779-65ad-476c-bc9c-9747beba2813] self.driver.spawn(context, instance, image_meta, [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] self._fetch_image_if_missing(context, vi) [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] image_cache(vi, tmp_image_ds_loc) [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] vm_util.copy_virtual_disk( [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] session._wait_for_task(vmdk_copy_task) [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] return self.wait_for_task(task_ref) [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] return evt.wait() [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] result = hub.switch() [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] return self.greenlet.switch() [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] self.f(*self.args, **self.kw) [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] raise exceptions.translate_fault(task_info.error) [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Faults: ['InvalidArgument'] [ 2413.170290] env[68282]: ERROR nova.compute.manager [instance: 41e28779-65ad-476c-bc9c-9747beba2813] [ 2413.171091] env[68282]: DEBUG nova.compute.utils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2413.172560] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Build of instance 41e28779-65ad-476c-bc9c-9747beba2813 was re-scheduled: A specified parameter was not correct: fileType [ 2413.172560] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2413.172947] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2413.173155] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2413.173335] env[68282]: DEBUG nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2413.173503] env[68282]: DEBUG nova.network.neutron [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2413.497177] env[68282]: DEBUG nova.network.neutron [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2413.509874] env[68282]: INFO nova.compute.manager [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Took 0.34 seconds to deallocate network for instance. [ 2413.608302] env[68282]: INFO nova.scheduler.client.report [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Deleted allocations for instance 41e28779-65ad-476c-bc9c-9747beba2813 [ 2413.635264] env[68282]: DEBUG oslo_concurrency.lockutils [None req-85e2f73d-1f54-400a-8491-6b2cf6035f8b tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "41e28779-65ad-476c-bc9c-9747beba2813" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 465.977s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.635560] env[68282]: DEBUG oslo_concurrency.lockutils [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "41e28779-65ad-476c-bc9c-9747beba2813" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 269.726s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.635793] env[68282]: DEBUG oslo_concurrency.lockutils [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "41e28779-65ad-476c-bc9c-9747beba2813-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2413.636191] env[68282]: DEBUG oslo_concurrency.lockutils [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "41e28779-65ad-476c-bc9c-9747beba2813-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.636430] env[68282]: 
DEBUG oslo_concurrency.lockutils [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "41e28779-65ad-476c-bc9c-9747beba2813-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.638845] env[68282]: INFO nova.compute.manager [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Terminating instance [ 2413.640685] env[68282]: DEBUG nova.compute.manager [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2413.640880] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2413.641379] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e0c71335-1944-404c-b7ed-b8fc89bef271 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.650277] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bca01a4-c224-4f3b-8c87-acbb696bdfb2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.676654] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 41e28779-65ad-476c-bc9c-9747beba2813 could not be found. [ 2413.676862] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2413.677053] env[68282]: INFO nova.compute.manager [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2413.677328] env[68282]: DEBUG oslo.service.loopingcall [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2413.677783] env[68282]: DEBUG nova.compute.manager [-] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2413.677897] env[68282]: DEBUG nova.network.neutron [-] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2413.702048] env[68282]: DEBUG nova.network.neutron [-] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2413.710017] env[68282]: INFO nova.compute.manager [-] [instance: 41e28779-65ad-476c-bc9c-9747beba2813] Took 0.03 seconds to deallocate network for instance. [ 2413.799391] env[68282]: DEBUG oslo_concurrency.lockutils [None req-dbe0dfcc-5d55-47f5-9143-3f6d4994ffe4 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Lock "41e28779-65ad-476c-bc9c-9747beba2813" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.164s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2415.087411] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.087828] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2415.087828] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.099641] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2415.099854] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2415.100032] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2415.100192] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2415.101282] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c5e9bd-9bc1-4a32-a6a1-dbe97b0e12c9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.110056] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c173863f-afb3-47de-bc69-51823c6aedd2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.123446] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc057199-3547-4bca-a78e-5e611c0eb7d9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.129385] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80a6abc-8c17-434f-a606-f67e3974c183 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.158907] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180929MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2415.159062] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2415.159321] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2415.210879] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 14e97724-1044-4f32-ac27-8062120c2c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2415.211059] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2c38a690-608e-4531-aeb3-629eb5c09532 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2415.211198] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2d391110-46da-475d-b324-d4bb6e13b4fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2415.211325] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6452c865-064a-4eaf-9689-e7d056d370df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2415.211448] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance f516baac-8c80-4223-8b68-3779bd785f0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2415.211622] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2415.211759] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2415.280104] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb84713-db2a-429b-933d-dd4ff47278c2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.288094] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e76fe04-e5e8-4851-8b16-88678ecc15e9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.318277] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103f5918-2d7d-4c12-bd78-3cf638908364 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.326081] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ec0f2e-e4d4-4e1d-ab4e-64fdf1ca432f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.340563] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2415.349194] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2415.366370] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2415.366573] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.207s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2419.367227] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.367591] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2419.367591] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2419.382071] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2419.382237] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2419.382369] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2419.382496] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2419.382621] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2419.382744] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2419.383261] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.383448] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.087640] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2422.087841] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2423.082372] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2423.087049] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2461.292361] env[68282]: WARNING oslo_vmware.rw_handles [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2461.292361] env[68282]: ERROR oslo_vmware.rw_handles [ 2461.293093] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 
tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2461.294818] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2461.295087] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Copying Virtual Disk [datastore2] vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/82ad7a7d-23fb-4e5e-a367-8927ef829f12/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2461.295383] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d5a0157-0871-4d1a-81f3-f5c1dd4da661 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.303207] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for the task: (returnval){ [ 2461.303207] env[68282]: value = "task-3470656" [ 2461.303207] env[68282]: _type = "Task" [ 2461.303207] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.311187] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': task-3470656, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.813583] env[68282]: DEBUG oslo_vmware.exceptions [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2461.813906] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2461.814477] env[68282]: ERROR nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2461.814477] env[68282]: Faults: ['InvalidArgument'] [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Traceback (most recent call last): [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] yield resources [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] self.driver.spawn(context, instance, image_meta, [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] self._fetch_image_if_missing(context, vi) [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] image_cache(vi, tmp_image_ds_loc) [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] vm_util.copy_virtual_disk( [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] session._wait_for_task(vmdk_copy_task) [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] return self.wait_for_task(task_ref) [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] return evt.wait() [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] result = hub.switch() [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] return self.greenlet.switch() [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] self.f(*self.args, **self.kw) [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] raise exceptions.translate_fault(task_info.error) [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Faults: ['InvalidArgument'] [ 2461.814477] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] [ 2461.815848] env[68282]: INFO nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Terminating instance [ 2461.817247] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2461.817460] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2461.817708] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fefee45-5efc-4699-8dc0-10bdf22b412a 
{{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.821314] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2461.821314] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2461.822140] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcbc23b-8f62-4ca3-84b4-d206cd86c61f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.828789] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2461.829083] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50f45345-44b4-48e0-bdfb-29d395a75dbd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.831207] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2461.831384] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2461.832337] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74c6d6ad-e5e0-4aed-9004-827929ab03ed {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.837271] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 2461.837271] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52440671-ee0c-4448-69a4-256827372b6b" [ 2461.837271] env[68282]: _type = "Task" [ 2461.837271] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.844455] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52440671-ee0c-4448-69a4-256827372b6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2461.894438] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2461.894654] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2461.894807] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Deleting the datastore file [datastore2] 14e97724-1044-4f32-ac27-8062120c2c46 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2461.895096] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c591fc3b-5ea9-4684-a0a5-84fb2ee61399 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.901842] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for the task: (returnval){ [ 2461.901842] env[68282]: value = "task-3470658" [ 2461.901842] env[68282]: _type = "Task" [ 2461.901842] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2461.909699] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': task-3470658, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.348072] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2462.348072] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating directory with path [datastore2] vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2462.348072] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-330892fb-e74c-476c-8d2c-b51745da6048 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.360278] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Created directory with path [datastore2] vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2462.360479] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Fetch image to [datastore2] vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2462.360654] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2462.361366] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598b6033-a97e-44eb-8440-f1190026df3c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.367661] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abccdd2-c48b-4061-8028-2334466d5f07 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.376374] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6394e35-f91c-4f5f-873d-f540598113f7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.408446] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7b094dff-02ce-4ccd-b095-7f8ca2ad5c7f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.416309] env[68282]: DEBUG oslo_vmware.api [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Task: {'id': task-3470658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076515} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.416492] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8fa42722-57c8-4603-9538-dc141cf5725f {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.418103] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2462.418295] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2462.418470] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2462.418645] env[68282]: INFO nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2462.420719] env[68282]: DEBUG nova.compute.claims [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2462.420895] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2462.421117] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2462.437033] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2462.490639] env[68282]: DEBUG oslo_vmware.rw_handles [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2462.551077] env[68282]: DEBUG oslo_vmware.rw_handles [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2462.551301] env[68282]: DEBUG oslo_vmware.rw_handles [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2462.581684] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537aaf62-45c3-47e2-a45f-0b02aaa67a12 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.589082] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7333347a-2567-4e26-8aa6-9c3704c38394 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.619786] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3998ad29-e741-42a5-9a5c-a7fc9437b4e6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.626549] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7dc0bd-189f-4dd1-9090-fe5cb3b29ad1 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.639187] env[68282]: DEBUG nova.compute.provider_tree [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2462.647339] env[68282]: DEBUG nova.scheduler.client.report [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2462.678636] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.257s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2462.679229] env[68282]: ERROR nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2462.679229] env[68282]: Faults: ['InvalidArgument'] [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Traceback (most recent call last): [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2462.679229] env[68282]: ERROR 
nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] self.driver.spawn(context, instance, image_meta, [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] self._fetch_image_if_missing(context, vi) [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] image_cache(vi, tmp_image_ds_loc) [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] vm_util.copy_virtual_disk( [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] session._wait_for_task(vmdk_copy_task) [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] return self.wait_for_task(task_ref) [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] return evt.wait() [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] result = hub.switch() [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] return self.greenlet.switch() [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] self.f(*self.args, **self.kw) [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] raise exceptions.translate_fault(task_info.error) [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Faults: ['InvalidArgument'] [ 2462.679229] env[68282]: ERROR nova.compute.manager [instance: 14e97724-1044-4f32-ac27-8062120c2c46] [ 2462.680442] env[68282]: DEBUG nova.compute.utils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2462.681414] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Build of instance 14e97724-1044-4f32-ac27-8062120c2c46 was re-scheduled: A specified parameter was not correct: fileType [ 2462.681414] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2462.681783] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2462.681958] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2462.682146] env[68282]: DEBUG nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2462.682312] env[68282]: DEBUG nova.network.neutron [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2462.986390] env[68282]: DEBUG nova.network.neutron [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2462.996366] env[68282]: INFO nova.compute.manager [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Took 0.31 seconds to deallocate network for instance. [ 2463.098340] env[68282]: INFO nova.scheduler.client.report [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Deleted allocations for instance 14e97724-1044-4f32-ac27-8062120c2c46 [ 2463.122046] env[68282]: DEBUG oslo_concurrency.lockutils [None req-28e82861-0910-4cae-ba6c-142780f6ab9c tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "14e97724-1044-4f32-ac27-8062120c2c46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 528.308s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.122046] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "14e97724-1044-4f32-ac27-8062120c2c46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 332.396s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2463.122046] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Acquiring lock "14e97724-1044-4f32-ac27-8062120c2c46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2463.122046] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "14e97724-1044-4f32-ac27-8062120c2c46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2463.122382] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "14e97724-1044-4f32-ac27-8062120c2c46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2463.124898] env[68282]: INFO nova.compute.manager [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Terminating instance [ 2463.126391] env[68282]: DEBUG nova.compute.manager [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2463.126595] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2463.127495] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ef5080b-f630-41b1-9248-0535ec24e429 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.136861] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf062a5-7fec-4076-88df-113c049d1af4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2463.162288] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 14e97724-1044-4f32-ac27-8062120c2c46 could not be found. [ 2463.162654] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2463.162769] env[68282]: INFO nova.compute.manager [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2463.163029] env[68282]: DEBUG oslo.service.loopingcall [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2463.163253] env[68282]: DEBUG nova.compute.manager [-] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2463.163350] env[68282]: DEBUG nova.network.neutron [-] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2463.202308] env[68282]: DEBUG nova.network.neutron [-] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2463.211060] env[68282]: INFO nova.compute.manager [-] [instance: 14e97724-1044-4f32-ac27-8062120c2c46] Took 0.05 seconds to deallocate network for instance. [ 2463.296054] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2115e72b-e640-4d1c-8d90-86141f627244 tempest-DeleteServersTestJSON-1799391927 tempest-DeleteServersTestJSON-1799391927-project-member] Lock "14e97724-1044-4f32-ac27-8062120c2c46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2473.087681] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2473.088086] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11263}} [ 2473.099878] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] There are 0 instances to clean {{(pid=68282) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11272}} [ 2475.099708] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2475.100168] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2475.100168] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2475.111670] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2475.111904] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2475.112097] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2475.112258] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2475.113810] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee014cd-1997-44b8-bde3-5c90055305ae {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.122290] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d95e9d5-ddad-4185-8c2d-0752d8ce660b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.136010] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e09ea94-1309-4c6e-8b39-8c94c1c3d34b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.142117] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b8edee-d098-470d-b014-226899336b68 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.170444] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180930MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2475.170600] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2475.170795] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2475.221683] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2c38a690-608e-4531-aeb3-629eb5c09532 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.221842] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2d391110-46da-475d-b324-d4bb6e13b4fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.221973] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6452c865-064a-4eaf-9689-e7d056d370df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.222113] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance f516baac-8c80-4223-8b68-3779bd785f0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2475.222291] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2475.222428] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2475.274824] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2690321d-e7b5-45d6-bea9-26568e1a4632 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.283138] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421d9293-f700-4787-a257-3d993e1d99db {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.311665] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d98fd386-1d4b-4564-9275-2df5f5c83279 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.318352] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784e4415-decc-4334-a037-7ebb66ba5a00 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2475.330874] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2475.339335] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2475.354734] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2475.354869] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.184s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2479.343507] env[68282]: DEBUG oslo_service.periodic_task [None 
req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2479.343866] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2479.343866] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2479.357394] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2479.357552] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2479.357686] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2479.357827] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2479.358015] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2479.358492] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2481.087401] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2482.083604] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2482.099382] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2482.099382] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2483.094617] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2483.567792] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2483.567792] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Getting list of instances from cluster (obj){ [ 2483.567792] env[68282]: value = "domain-c8" [ 2483.567792] env[68282]: _type = "ClusterComputeResource" [ 2483.567792] env[68282]: } {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2483.568870] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f29f1d1-11ae-4ec8-b78c-7af823404564 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2483.582123] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Got total of 4 instances {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2484.118542] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2485.082968] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task 
ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2499.543652] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._sync_power_states {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2499.558453] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Getting list of instances from cluster (obj){ [ 2499.558453] env[68282]: value = "domain-c8" [ 2499.558453] env[68282]: _type = "ClusterComputeResource" [ 2499.558453] env[68282]: } {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2499.559785] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a42ccb7-1f06-48f0-8188-815da9aaf3d7 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2499.572475] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Got total of 4 instances {{(pid=68282) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2499.572619] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 2c38a690-608e-4531-aeb3-629eb5c09532 {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 2499.572822] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 2d391110-46da-475d-b324-d4bb6e13b4fa {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 2499.572986] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid 6452c865-064a-4eaf-9689-e7d056d370df {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 2499.573174] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Triggering sync for uuid f516baac-8c80-4223-8b68-3779bd785f0c {{(pid=68282) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10373}} [ 2499.573479] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "2c38a690-608e-4531-aeb3-629eb5c09532" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2499.573719] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "2d391110-46da-475d-b324-d4bb6e13b4fa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2499.573927] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "6452c865-064a-4eaf-9689-e7d056d370df" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2499.574204] env[68282]: DEBUG oslo_concurrency.lockutils 
[None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "f516baac-8c80-4223-8b68-3779bd785f0c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2500.087195] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2500.087410] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Cleaning up deleted instances with incomplete migration {{(pid=68282) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11301}} [ 2508.396179] env[68282]: DEBUG oslo_concurrency.lockutils [None req-f4362b94-5913-41b5-b953-ce325b1a70e6 tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquiring lock "2d391110-46da-475d-b324-d4bb6e13b4fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2511.309850] env[68282]: WARNING oslo_vmware.rw_handles [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2511.309850] env[68282]: ERROR oslo_vmware.rw_handles [ 2511.310732] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2511.312503] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 
tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2511.312786] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Copying Virtual Disk [datastore2] vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/49ef7050-596d-43c0-b786-e1dc5ee9b920/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2511.313118] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d0f4096-1c77-48b5-a45b-8c1a222ec7b2 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.322328] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 2511.322328] env[68282]: value = "task-3470660" [ 2511.322328] env[68282]: _type = "Task" [ 2511.322328] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2511.330993] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2511.835604] env[68282]: DEBUG oslo_vmware.exceptions [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Fault InvalidArgument not matched. 
{{(pid=68282) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2511.835604] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2511.835604] env[68282]: ERROR nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2511.835604] env[68282]: Faults: ['InvalidArgument'] [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Traceback (most recent call last): [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] yield resources [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] self.driver.spawn(context, instance, image_meta, [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] self._fetch_image_if_missing(context, vi) [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] image_cache(vi, tmp_image_ds_loc) [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] vm_util.copy_virtual_disk( [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] session._wait_for_task(vmdk_copy_task) [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] return self.wait_for_task(task_ref) [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] return evt.wait() [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] result = hub.switch() [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] return self.greenlet.switch() [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2511.835604] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] self.f(*self.args, **self.kw) [ 2511.836384] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2511.836384] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] raise exceptions.translate_fault(task_info.error) [ 2511.836384] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2511.836384] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Faults: ['InvalidArgument'] [ 2511.836384] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] [ 2511.836384] env[68282]: INFO nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Terminating instance [ 2511.836653] env[68282]: DEBUG oslo_concurrency.lockutils [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2511.836862] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2511.837120] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9b26ea5-530e-4f6d-8ede-b3c82e2f9ea3 {{(pid=68282) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.840543] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2511.840745] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2511.842251] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a904bb51-dc6d-4fbf-b1ec-453872cb63f6 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.845098] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2511.845274] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68282) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2511.846385] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3dc1e8c-9d77-4e35-8af8-d8d42ddec36c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.852149] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Unregistering the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2511.852674] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ca60cea-77d6-463e-a485-35396db2cebf {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.855066] env[68282]: DEBUG oslo_vmware.api [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Waiting for the task: (returnval){ [ 2511.855066] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523d97db-b938-1010-9adc-8f219da3696a" [ 2511.855066] env[68282]: _type = "Task" [ 2511.855066] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2511.864459] env[68282]: DEBUG oslo_vmware.api [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]523d97db-b938-1010-9adc-8f219da3696a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2511.922656] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Unregistered the VM {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2511.922901] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Deleting contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2511.923096] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleting the datastore file [datastore2] 2c38a690-608e-4531-aeb3-629eb5c09532 {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2511.923373] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d68be745-3169-4601-b3f7-92ba6f1799d3 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.930501] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for the task: (returnval){ [ 2511.930501] env[68282]: value = "task-3470662" [ 2511.930501] env[68282]: _type = "Task" [ 2511.930501] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2511.942606] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470662, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2512.365243] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Preparing fetch location {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2512.365508] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Creating directory with path [datastore2] vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2512.365714] env[68282]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4b0e6d0-f05f-4da1-a7dc-f87c9629c274 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.377964] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Created directory with path [datastore2] vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2512.378169] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Fetch image to [datastore2] vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2512.378347] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to [datastore2] vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2512.379071] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93eb0d15-34aa-4280-951a-8894c245e8c0 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.385288] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e937e7ae-fab6-42d0-9878-839dd68e5ef4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.394279] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62136c47-d624-484d-8cfb-35fcab2217eb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.426223] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1050a61-a6c8-466f-82a1-8421cb9211d5 {{(pid=68282) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.435662] env[68282]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-21645821-4804-4a41-ad2f-d47c7cb6686c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.442720] env[68282]: DEBUG oslo_vmware.api [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Task: {'id': task-3470662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076302} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2512.443046] env[68282]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleted the datastore file {{(pid=68282) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2512.443270] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Deleted contents of the VM from datastore datastore2 {{(pid=68282) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2512.443476] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2512.443687] env[68282]: INFO nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2512.446149] env[68282]: DEBUG nova.compute.claims [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Aborting claim: {{(pid=68282) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2512.446357] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2512.446619] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2512.457368] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Downloading image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2512.512778] env[68282]: DEBUG oslo_vmware.rw_handles [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68282) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2512.573257] env[68282]: DEBUG oslo_vmware.rw_handles [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Completed reading data from the image iterator. {{(pid=68282) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2512.573303] env[68282]: DEBUG oslo_vmware.rw_handles [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68282) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2512.610346] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7782695a-3da6-4bc1-8827-55a474220b96 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.617879] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5527d41c-78a2-43e8-bd37-e844fdeb715d {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.649684] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7113eee3-08ba-4bb5-b316-339ddaeb2c75 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.656899] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d0ac01-3131-439d-af56-a75781eb3a0c {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2512.670752] env[68282]: DEBUG nova.compute.provider_tree [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2512.680545] env[68282]: DEBUG nova.scheduler.client.report [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2512.694814] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.248s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.695344] env[68282]: ERROR nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2512.695344] env[68282]: Faults: ['InvalidArgument'] [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Traceback (most recent call last): [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2512.695344] 
env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] self.driver.spawn(context, instance, image_meta, [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] self._fetch_image_if_missing(context, vi) [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] image_cache(vi, tmp_image_ds_loc) [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] vm_util.copy_virtual_disk( [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] session._wait_for_task(vmdk_copy_task) [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] return self.wait_for_task(task_ref) [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] return evt.wait() [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] result = hub.switch() [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] return self.greenlet.switch() [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] self.f(*self.args, **self.kw) [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] raise exceptions.translate_fault(task_info.error) [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Faults: ['InvalidArgument'] [ 2512.695344] env[68282]: ERROR nova.compute.manager [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] [ 2512.696065] env[68282]: DEBUG nova.compute.utils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] VimFaultException {{(pid=68282) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2512.697517] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Build of instance 2c38a690-608e-4531-aeb3-629eb5c09532 was re-scheduled: A specified parameter was not correct: fileType [ 2512.697517] env[68282]: Faults: ['InvalidArgument'] {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2512.697893] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Unplugging VIFs for instance {{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2512.698078] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68282) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2512.698254] env[68282]: DEBUG nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2512.698429] env[68282]: DEBUG nova.network.neutron [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2513.006235] env[68282]: DEBUG nova.network.neutron [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2513.021847] env[68282]: INFO nova.compute.manager [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Took 0.32 seconds to deallocate network for instance. [ 2513.163848] env[68282]: INFO nova.scheduler.client.report [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Deleted allocations for instance 2c38a690-608e-4531-aeb3-629eb5c09532 [ 2513.188309] env[68282]: DEBUG oslo_concurrency.lockutils [None req-6eecd763-21d5-4985-ad65-4d1abf3b3f9e tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "2c38a690-608e-4531-aeb3-629eb5c09532" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 394.660s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2513.188571] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "2c38a690-608e-4531-aeb3-629eb5c09532" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 199.037s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2513.188817] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Acquiring lock "2c38a690-608e-4531-aeb3-629eb5c09532-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2513.189049] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "2c38a690-608e-4531-aeb3-629eb5c09532-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2513.189230] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "2c38a690-608e-4531-aeb3-629eb5c09532-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2513.191148] env[68282]: INFO nova.compute.manager [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Terminating instance [ 2513.192955] env[68282]: DEBUG nova.compute.manager [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Start destroying the instance on the hypervisor. {{(pid=68282) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2513.193088] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Destroying instance {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2513.193462] env[68282]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79fa4d83-fe44-4e44-a2da-21f44520ab77 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.202517] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f543649-0757-4e46-ac8a-f78b43e529af {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.228716] env[68282]: WARNING nova.virt.vmwareapi.vmops [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2c38a690-608e-4531-aeb3-629eb5c09532 could not be found. [ 2513.228937] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Instance destroyed {{(pid=68282) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2513.229136] env[68282]: INFO nova.compute.manager [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2513.229380] env[68282]: DEBUG oslo.service.loopingcall [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2513.229867] env[68282]: DEBUG nova.compute.manager [-] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Deallocating network for instance {{(pid=68282) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2513.229972] env[68282]: DEBUG nova.network.neutron [-] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] deallocate_for_instance() {{(pid=68282) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2513.255773] env[68282]: DEBUG nova.network.neutron [-] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Updating instance_info_cache with network_info: [] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2513.263749] env[68282]: INFO nova.compute.manager [-] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] Took 0.03 seconds to deallocate network for instance. [ 2513.354439] env[68282]: DEBUG oslo_concurrency.lockutils [None req-2f3ba803-4712-4962-aa56-601918bb338f tempest-ServerDiskConfigTestJSON-1050324068 tempest-ServerDiskConfigTestJSON-1050324068-project-member] Lock "2c38a690-608e-4531-aeb3-629eb5c09532" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2513.355232] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "2c38a690-608e-4531-aeb3-629eb5c09532" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.782s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2513.355426] env[68282]: INFO nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2c38a690-608e-4531-aeb3-629eb5c09532] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2513.355602] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "2c38a690-608e-4531-aeb3-629eb5c09532" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2515.864819] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Acquiring lock "c18ebfad-0052-4421-a626-380d0ef13b69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2515.865081] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Lock "c18ebfad-0052-4421-a626-380d0ef13b69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2515.877122] env[68282]: DEBUG nova.compute.manager [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Starting instance... {{(pid=68282) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2515.939787] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2515.940217] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2515.942012] env[68282]: INFO nova.compute.claims [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2516.060017] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd56ca8e-a753-42d6-ba10-5cb05bc6fc4b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.068069] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecedc9ee-7ca5-4e42-b272-eceefe95ee1b {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.101653] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-46c16722-e1a7-4c61-8309-9498ae722e3e {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.108901] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d016ad-9720-42df-9aa0-be82001de090 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.122240] env[68282]: DEBUG nova.compute.provider_tree [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2516.134188] env[68282]: DEBUG nova.scheduler.client.report [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2516.147969] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.208s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2516.148477] env[68282]: DEBUG nova.compute.manager [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Start building networks asynchronously for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2516.181247] env[68282]: DEBUG nova.compute.utils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Using /dev/sd instead of None {{(pid=68282) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2516.182436] env[68282]: DEBUG nova.compute.manager [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Allocating IP information in the background. 
{{(pid=68282) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2516.182617] env[68282]: DEBUG nova.network.neutron [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] allocate_for_instance() {{(pid=68282) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2516.192100] env[68282]: DEBUG nova.compute.manager [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Start building block device mappings for instance. {{(pid=68282) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2516.244870] env[68282]: DEBUG nova.policy [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f6f9629f9dcb4d2bbe6fe188fa2871c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '635f792088b34af1b0c66b28f087c963', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68282) authorize /opt/stack/nova/nova/policy.py:203}} [ 2516.256258] env[68282]: DEBUG nova.compute.manager [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Start spawning the instance on the hypervisor. 
{{(pid=68282) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2516.284258] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T04:47:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T04:46:53Z,direct_url=,disk_format='vmdk',id=658717f1-7b98-47ed-bf66-8ef1a68a7047,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4b9c04f548524c84be5a344a65dca318',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T04:46:53Z,virtual_size=,visibility=), allow threads: False {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2516.284544] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Flavor limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2516.284717] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Image limits 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2516.284905] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Flavor pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2516.285067] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Image pref 0:0:0 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2516.285221] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68282) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2516.285436] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2516.285599] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2516.285768] env[68282]: DEBUG nova.virt.hardware [None 
req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Got 1 possible topologies {{(pid=68282) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2516.285934] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2516.286185] env[68282]: DEBUG nova.virt.hardware [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68282) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2516.287084] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abdb00b-0e54-451e-bb88-7442cc8e4799 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.295013] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df3324c-60f9-40ec-a8e8-26539e7593fc {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2516.573658] env[68282]: DEBUG nova.network.neutron [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Successfully created port: ec74b034-409b-4f1c-b5ff-65b523b45b85 {{(pid=68282) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2517.273153] env[68282]: DEBUG nova.compute.manager [req-6f50ac75-a4ca-4be4-aa2d-dd86a0eb624d req-c9ab36d7-6621-4d3f-842e-691bd424e425 service nova] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Received event network-vif-plugged-ec74b034-409b-4f1c-b5ff-65b523b45b85 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 2517.273153] env[68282]: DEBUG oslo_concurrency.lockutils [req-6f50ac75-a4ca-4be4-aa2d-dd86a0eb624d req-c9ab36d7-6621-4d3f-842e-691bd424e425 service nova] Acquiring lock "c18ebfad-0052-4421-a626-380d0ef13b69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2517.273153] env[68282]: DEBUG oslo_concurrency.lockutils [req-6f50ac75-a4ca-4be4-aa2d-dd86a0eb624d req-c9ab36d7-6621-4d3f-842e-691bd424e425 service nova] Lock "c18ebfad-0052-4421-a626-380d0ef13b69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2517.273153] env[68282]: DEBUG oslo_concurrency.lockutils [req-6f50ac75-a4ca-4be4-aa2d-dd86a0eb624d req-c9ab36d7-6621-4d3f-842e-691bd424e425 service nova] Lock "c18ebfad-0052-4421-a626-380d0ef13b69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2517.273153] env[68282]: DEBUG nova.compute.manager 
[req-6f50ac75-a4ca-4be4-aa2d-dd86a0eb624d req-c9ab36d7-6621-4d3f-842e-691bd424e425 service nova] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] No waiting events found dispatching network-vif-plugged-ec74b034-409b-4f1c-b5ff-65b523b45b85 {{(pid=68282) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2517.273153] env[68282]: WARNING nova.compute.manager [req-6f50ac75-a4ca-4be4-aa2d-dd86a0eb624d req-c9ab36d7-6621-4d3f-842e-691bd424e425 service nova] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Received unexpected event network-vif-plugged-ec74b034-409b-4f1c-b5ff-65b523b45b85 for instance with vm_state building and task_state spawning. [ 2517.354214] env[68282]: DEBUG nova.network.neutron [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Successfully updated port: ec74b034-409b-4f1c-b5ff-65b523b45b85 {{(pid=68282) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2517.363707] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Acquiring lock "refresh_cache-c18ebfad-0052-4421-a626-380d0ef13b69" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2517.364372] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Acquired lock "refresh_cache-c18ebfad-0052-4421-a626-380d0ef13b69" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2517.364680] env[68282]: DEBUG nova.network.neutron [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Building network info cache for instance {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2517.409818] env[68282]: DEBUG nova.network.neutron [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Instance cache missing network info. 
{{(pid=68282) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2517.580612] env[68282]: DEBUG nova.network.neutron [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Updating instance_info_cache with network_info: [{"id": "ec74b034-409b-4f1c-b5ff-65b523b45b85", "address": "fa:16:3e:c7:8a:47", "network": {"id": "d9c06079-8999-4a49-9aa0-72ddea043581", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-355047719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "635f792088b34af1b0c66b28f087c963", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec74b034-40", "ovs_interfaceid": "ec74b034-409b-4f1c-b5ff-65b523b45b85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2517.593639] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Releasing lock "refresh_cache-c18ebfad-0052-4421-a626-380d0ef13b69" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2517.593943] env[68282]: DEBUG nova.compute.manager [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Instance network_info: |[{"id": "ec74b034-409b-4f1c-b5ff-65b523b45b85", "address": "fa:16:3e:c7:8a:47", "network": {"id": "d9c06079-8999-4a49-9aa0-72ddea043581", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-355047719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "635f792088b34af1b0c66b28f087c963", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec74b034-40", "ovs_interfaceid": "ec74b034-409b-4f1c-b5ff-65b523b45b85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68282) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2517.594925] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:8a:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec74b034-409b-4f1c-b5ff-65b523b45b85', 'vif_model': 'vmxnet3'}] {{(pid=68282) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2517.602279] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Creating folder: Project (635f792088b34af1b0c66b28f087c963). Parent ref: group-v693573. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2517.603304] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-456de194-d029-4aa3-b573-f3a6c1fec674 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2517.613253] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Created folder: Project (635f792088b34af1b0c66b28f087c963) in parent group-v693573. [ 2517.613476] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Creating folder: Instances. Parent ref: group-v693688. {{(pid=68282) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2517.613715] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4fcff61-917e-417a-82c4-8d92091ddef9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2517.622302] env[68282]: INFO nova.virt.vmwareapi.vm_util [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Created folder: Instances in parent group-v693688. [ 2517.622573] env[68282]: DEBUG oslo.service.loopingcall [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68282) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2517.622768] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Creating VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2517.622968] env[68282]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2d6acf3-13b0-4ee1-88e5-040fb17b86b9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2517.643212] env[68282]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2517.643212] env[68282]: value = "task-3470665" [ 2517.643212] env[68282]: _type = "Task" [ 2517.643212] env[68282]: } to complete. 
{{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2517.652160] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470665, 'name': CreateVM_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2518.153050] env[68282]: DEBUG oslo_vmware.api [-] Task: {'id': task-3470665, 'name': CreateVM_Task, 'duration_secs': 0.301589} completed successfully. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2518.153050] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Created VM on the ESX host {{(pid=68282) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2518.153547] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2518.153732] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2518.154078] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2518.154330] env[68282]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46b3903d-5302-4729-9108-404dc808fcdb {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2518.158555] env[68282]: DEBUG oslo_vmware.api [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Waiting for the task: (returnval){ [ 2518.158555] env[68282]: value = "session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52803ef3-7f3e-f365-bea1-60b49e186317" [ 2518.158555] env[68282]: _type = "Task" [ 2518.158555] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2518.165534] env[68282]: DEBUG oslo_vmware.api [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Task: {'id': session[520b2d88-7ed8-1bbe-97c4-ef9deba3ae2a]52803ef3-7f3e-f365-bea1-60b49e186317, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2518.669543] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2518.669934] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Processing image 658717f1-7b98-47ed-bf66-8ef1a68a7047 {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2518.669982] env[68282]: DEBUG oslo_concurrency.lockutils [None req-092301a4-bb6f-4903-9564-b7f87af1ce07 tempest-ServerMetadataTestJSON-323109963 tempest-ServerMetadataTestJSON-323109963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2519.301316] env[68282]: DEBUG nova.compute.manager [req-6ffaecbb-10b6-4bc4-87be-25cac34de0e5 req-9caa0b0a-d95c-4b0d-a9b5-f9a1d32a599b service nova] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Received event network-changed-ec74b034-409b-4f1c-b5ff-65b523b45b85 {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11166}} [ 2519.301532] env[68282]: DEBUG nova.compute.manager [req-6ffaecbb-10b6-4bc4-87be-25cac34de0e5 req-9caa0b0a-d95c-4b0d-a9b5-f9a1d32a599b service nova] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Refreshing instance network info cache due to event network-changed-ec74b034-409b-4f1c-b5ff-65b523b45b85. {{(pid=68282) external_instance_event /opt/stack/nova/nova/compute/manager.py:11171}} [ 2519.301750] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ffaecbb-10b6-4bc4-87be-25cac34de0e5 req-9caa0b0a-d95c-4b0d-a9b5-f9a1d32a599b service nova] Acquiring lock "refresh_cache-c18ebfad-0052-4421-a626-380d0ef13b69" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2519.301894] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ffaecbb-10b6-4bc4-87be-25cac34de0e5 req-9caa0b0a-d95c-4b0d-a9b5-f9a1d32a599b service nova] Acquired lock "refresh_cache-c18ebfad-0052-4421-a626-380d0ef13b69" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2519.302072] env[68282]: DEBUG nova.network.neutron [req-6ffaecbb-10b6-4bc4-87be-25cac34de0e5 req-9caa0b0a-d95c-4b0d-a9b5-f9a1d32a599b service nova] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Refreshing network info cache for port ec74b034-409b-4f1c-b5ff-65b523b45b85 {{(pid=68282) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2519.565729] env[68282]: DEBUG nova.network.neutron [req-6ffaecbb-10b6-4bc4-87be-25cac34de0e5 req-9caa0b0a-d95c-4b0d-a9b5-f9a1d32a599b service nova] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Updated VIF entry in instance network info cache for port ec74b034-409b-4f1c-b5ff-65b523b45b85. 
{{(pid=68282) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2519.566105] env[68282]: DEBUG nova.network.neutron [req-6ffaecbb-10b6-4bc4-87be-25cac34de0e5 req-9caa0b0a-d95c-4b0d-a9b5-f9a1d32a599b service nova] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Updating instance_info_cache with network_info: [{"id": "ec74b034-409b-4f1c-b5ff-65b523b45b85", "address": "fa:16:3e:c7:8a:47", "network": {"id": "d9c06079-8999-4a49-9aa0-72ddea043581", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-355047719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "635f792088b34af1b0c66b28f087c963", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec74b034-40", "ovs_interfaceid": "ec74b034-409b-4f1c-b5ff-65b523b45b85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68282) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2519.578414] env[68282]: DEBUG oslo_concurrency.lockutils [req-6ffaecbb-10b6-4bc4-87be-25cac34de0e5 req-9caa0b0a-d95c-4b0d-a9b5-f9a1d32a599b service nova] Releasing lock "refresh_cache-c18ebfad-0052-4421-a626-380d0ef13b69" {{(pid=68282) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2524.733868] env[68282]: DEBUG oslo_concurrency.lockutils [None req-ed9404d8-b2fb-403a-bfcf-e0f45979c421 tempest-ServerShowV257Test-2038982264 tempest-ServerShowV257Test-2038982264-project-member] Acquiring lock "6452c865-064a-4eaf-9689-e7d056d370df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2535.096642] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager.update_available_resource {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2535.111279] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2535.111538] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2535.111715] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2535.111874] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68282) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2535.113058] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7aebd1d-2f79-446f-aad1-28e88ae93929 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.122075] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c6acc4-f90b-4b98-85bd-a5fcf6925cfd {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.140069] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73145084-fef4-48f5-910e-d8c94e932fb9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.148627] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d23d371-6eb1-4089-b754-8db0da28b2c4 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.177539] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180917MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68282) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2535.177696] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2535.177921] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2535.255312] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 2d391110-46da-475d-b324-d4bb6e13b4fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2535.255485] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance 6452c865-064a-4eaf-9689-e7d056d370df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2535.255616] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance f516baac-8c80-4223-8b68-3779bd785f0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2535.255743] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Instance c18ebfad-0052-4421-a626-380d0ef13b69 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68282) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2535.255928] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2535.256086] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68282) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2535.273104] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing inventories for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2535.286481] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Updating ProviderTree inventory for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2535.286668] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Updating inventory in ProviderTree for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2535.298096] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing aggregate associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, aggregates: None {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 
2535.315442] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Refreshing trait associations for resource provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68282) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2535.372577] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472a3897-7271-4c7c-b5f4-577c655e6704 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.380069] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf66fcc0-c44c-4204-bb11-caf4e197cdea {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.410094] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee55f8b-f62b-4cb4-8f01-9422a46e6908 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.416655] env[68282]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2450360-2945-467c-8179-2dad197ed5f9 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2535.429319] env[68282]: DEBUG nova.compute.provider_tree [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed in ProviderTree for provider: 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e {{(pid=68282) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2535.439451] env[68282]: DEBUG nova.scheduler.client.report [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Inventory has not changed for provider 1e3e1b9d-72b2-4f7c-b9af-ad4eb2cc9e9e based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68282) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2535.454742] env[68282]: DEBUG nova.compute.resource_tracker [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68282) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2535.454996] env[68282]: DEBUG oslo_concurrency.lockutils [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.277s {{(pid=68282) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2536.353490] env[68282]: DEBUG oslo_concurrency.lockutils [None req-b4e400f4-4fa5-4ce7-8597-2190831b9e55 tempest-ServersTestJSON-115846852 tempest-ServersTestJSON-115846852-project-member] Acquiring lock "f516baac-8c80-4223-8b68-3779bd785f0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68282) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2536.446140] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2536.446329] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68282) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10582}} [ 2540.088084] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2540.088466] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Starting heal instance info cache {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9963}} [ 2540.088466] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Rebuilding the list of instances to heal {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9967}} [ 2540.104096] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2540.104269] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: 6452c865-064a-4eaf-9689-e7d056d370df] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2540.104393] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: f516baac-8c80-4223-8b68-3779bd785f0c] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2540.104522] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] [instance: c18ebfad-0052-4421-a626-380d0ef13b69] Skipping network cache update for instance because it is Building. {{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9976}} [ 2540.104646] env[68282]: DEBUG nova.compute.manager [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Didn't find any instances for network info cache update. 
{{(pid=68282) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10049}} [ 2540.105160] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2541.087230] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2543.087428] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2544.087623] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2544.087989] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2546.084507] env[68282]: DEBUG oslo_service.periodic_task [None req-71b41526-e72d-47b7-ad94-a23990908c5d None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68282) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2557.956663] env[68282]: WARNING oslo_vmware.rw_handles [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles response.begin() [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2557.956663] env[68282]: ERROR oslo_vmware.rw_handles [ 2557.957499] env[68282]: DEBUG nova.virt.vmwareapi.images [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 
tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Downloaded image file data 658717f1-7b98-47ed-bf66-8ef1a68a7047 to vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk on the data store datastore2 {{(pid=68282) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2557.959239] env[68282]: DEBUG nova.virt.vmwareapi.vmops [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] [instance: 2d391110-46da-475d-b324-d4bb6e13b4fa] Caching image {{(pid=68282) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2557.959485] env[68282]: DEBUG nova.virt.vmwareapi.vm_util [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Copying Virtual Disk [datastore2] vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047/tmp-sparse.vmdk to [datastore2] vmware_temp/79e2dacc-99f8-4d81-968e-7eccee0e8ebf/658717f1-7b98-47ed-bf66-8ef1a68a7047/658717f1-7b98-47ed-bf66-8ef1a68a7047.vmdk {{(pid=68282) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2557.959777] env[68282]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e881b0d-d639-468f-9e0e-c26c9142d242 {{(pid=68282) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2557.967516] env[68282]: DEBUG oslo_vmware.api [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Waiting for the task: (returnval){ [ 2557.967516] env[68282]: value = "task-3470666" [ 2557.967516] env[68282]: _type = "Task" [ 2557.967516] env[68282]: } to complete. {{(pid=68282) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2557.975489] env[68282]: DEBUG oslo_vmware.api [None req-48dc9e30-c535-4a8e-851d-3e97be43dc8c tempest-ServerGroupTestJSON-534504489 tempest-ServerGroupTestJSON-534504489-project-member] Task: {'id': task-3470666, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68282) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}